use ic_base_types::NumSeconds;
use ic_interfaces::execution_environment::{
AvailableMemory, CanisterOutOfCyclesError, ExecutionParameters, HypervisorError,
HypervisorResult, SubnetAvailableMemory, SystemApi, TrapCode,
};
use ic_logger::replica_logger::no_op_logger;
use ic_registry_subnet_type::SubnetType;
use ic_replicated_state::{
canister_state::ENFORCE_MESSAGE_MEMORY_USAGE, testing::CanisterQueuesTesting, CallOrigin,
Memory, NumWasmPages, PageMap, SystemState,
};
use ic_system_api::{
sandbox_safe_system_state::SandboxSafeSystemState, ApiType, DefaultOutOfInstructionsHandler,
NonReplicatedQueryKind, SystemApiImpl,
};
use ic_test_utilities::{
cycles_account_manager::CyclesAccountManagerBuilder,
mock_time,
state::SystemStateBuilder,
types::{
ids::{call_context_test_id, canister_test_id, user_test_id},
messages::RequestBuilder,
},
};
use ic_types::{
messages::{CallContextId, CallbackId, RejectContext, MAX_RESPONSE_COUNT_BYTES},
methods::{Callback, WasmClosure},
user_error::RejectCode,
CountBytes, Cycles, NumBytes, NumInstructions, Time,
};
use std::{
convert::{From, TryInto},
sync::Arc,
};
mod common;
use common::*;
// 2^40 (~1.1 trillion) cycles: a comfortably large starting balance so that
// cycle accounting never interferes with the API-availability checks below.
const INITIAL_CYCLES: Cycles = Cycles::new(1 << 40);
/// Builds a running `SystemState` for a fixed test canister/controller pair,
/// funded with the given cycles balance and a 100_000-second freeze threshold.
fn get_system_state_with_cycles(cycles_amount: Cycles) -> SystemState {
    let canister_id = canister_test_id(42);
    let controller = user_test_id(24).get();
    let freeze_threshold = NumSeconds::from(100_000);
    SystemState::new_running(canister_id, controller, cycles_amount, freeze_threshold)
}
/// Asserts that a system-API call is *allowed* in the current execution
/// context. Availability is signalled by the error message: a supported call
/// may still fail for other reasons, but must never produce a
/// `ContractViolation` saying it "cannot be executed".
fn assert_api_supported<T>(res: HypervisorResult<T>) {
    match res {
        Err(HypervisorError::ContractViolation(err)) => {
            assert!(!err.contains("cannot be executed"), "{}", err)
        }
        // Ok results and any other error kind both count as "supported".
        _ => {}
    }
}
/// Asserts that a system-API call is *rejected* in the current execution
/// context: it must return a `ContractViolation` whose message says the call
/// "cannot be executed". Any other outcome (success or a different error)
/// means the API was unexpectedly available.
fn assert_api_not_supported<T>(res: HypervisorResult<T>) {
    if let Err(HypervisorError::ContractViolation(err)) = res {
        assert!(err.contains("cannot be executed"), "{}", err)
    } else {
        unreachable!("Expected api to be unsupported.")
    }
}
// Checks which ic0.* system APIs are available during canister installation
// (`ApiType::init`): caller/argument inspection and environment APIs work,
// while response, inter-canister-call, and message-cycles APIs are rejected.
#[test]
fn test_canister_init_support() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut api = get_system_api(
        ApiType::init(mock_time(), vec![], user_test_id(1).get()),
        &get_system_state(),
        cycles_account_manager,
    );
    // Caller identity and install arguments are readable during init.
    assert_api_supported(api.ic0_msg_caller_size());
    assert_api_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_msg_arg_data_size());
    assert_api_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    // Init has no message to respond to: the whole reply/reject family is off.
    assert_api_not_supported(api.ic0_msg_reply());
    assert_api_not_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    // No outgoing inter-canister calls during init.
    assert_api_not_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_not_supported(api.ic0_call_cycles_add(0));
    assert_api_not_supported(api.ic0_call_perform());
    // Stable memory (32- and 64-bit variants) is fully usable.
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    // No message cycles to inspect, refund, or accept during init.
    assert_api_not_supported(api.ic0_msg_cycles_available());
    assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_accept(0));
    assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
// Checks API availability during an update call on a System subnet using the
// CMC canister state: responses, outgoing calls, and message-cycles APIs are
// available, and — uniquely for the CMC setup — ic0.mint_cycles is allowed.
#[test]
fn test_canister_update_support() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_subnet_type(SubnetType::System)
        .build();
    let api_type = ApiTypeBuilder::new().build_update_api();
    let mut api = get_system_api(api_type, &get_cmc_system_state(), cycles_account_manager);
    assert_api_supported(api.ic0_msg_caller_size());
    assert_api_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_msg_arg_data_size());
    assert_api_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    // Updates can reply/reject, but there is no incoming reject to inspect.
    assert_api_supported(api.ic0_msg_reply());
    assert_api_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    // Full inter-canister-call API is available in updates.
    assert_api_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_supported(api.ic0_call_cycles_add(0));
    assert_api_supported(api.ic0_call_perform());
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    // Cycles attached to the incoming message can be read and accepted;
    // "refunded" only makes sense in a response context, so it stays off.
    assert_api_supported(api.ic0_msg_cycles_available());
    assert_api_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_accept(0));
    assert_api_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_supported(api.ic0_mint_cycles(0));
}
// Checks API availability in a replicated query: read-style context that can
// still reply/reject, but cannot make calls, touch message cycles, set
// certified data, or read a data certificate.
#[test]
fn test_canister_replicated_query_support() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut api = get_system_api(
        ApiType::replicated_query(mock_time(), vec![], user_test_id(1).get(), None),
        &get_system_state(),
        cycles_account_manager,
    );
    assert_api_supported(api.ic0_msg_arg_data_size());
    assert_api_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_msg_caller_size());
    assert_api_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    assert_api_supported(api.ic0_msg_reply());
    assert_api_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    // Queries cannot initiate inter-canister calls.
    assert_api_not_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_not_supported(api.ic0_call_cycles_add(0));
    assert_api_not_supported(api.ic0_call_perform());
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_available());
    assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_accept(0));
    assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
// Intended to check API availability for a *pure* (non-replicated,
// state-changes-discarded) query.
//
// NOTE(review): this test body is byte-identical to
// `test_canister_replicated_query_support` above — both construct
// `ApiType::replicated_query`. Presumably this one was meant to use the
// non-replicated query API with the `Pure` kind (compare the `Stateful`
// variant in the next test); confirm and fix the `ApiType` if so.
#[test]
fn test_canister_pure_query_support() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut api = get_system_api(
        ApiType::replicated_query(mock_time(), vec![], user_test_id(1).get(), None),
        &get_system_state(),
        cycles_account_manager,
    );
    assert_api_supported(api.ic0_msg_arg_data_size());
    assert_api_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_msg_caller_size());
    assert_api_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    assert_api_supported(api.ic0_msg_reply());
    assert_api_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_not_supported(api.ic0_call_cycles_add(0));
    assert_api_not_supported(api.ic0_call_perform());
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_available());
    assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_accept(0));
    assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
// Checks API availability in a *stateful* non-replicated query (a composite
// query that may make downstream calls): most of the call API opens up
// (except attaching cycles) and the data certificate becomes readable.
#[test]
fn test_canister_stateful_query_support() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let builder = ApiTypeBuilder::new();
    let mut api = get_system_api(
        ApiType::non_replicated_query(
            mock_time(),
            vec![],
            user_test_id(1).get(),
            CallContextId::from(1),
            builder.own_subnet_id,
            builder.network_topology,
            Some(vec![1]),
            NonReplicatedQueryKind::Stateful,
        ),
        &get_system_state(),
        cycles_account_manager,
    );
    assert_api_supported(api.ic0_msg_caller_size());
    assert_api_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_msg_arg_data_size());
    assert_api_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    assert_api_supported(api.ic0_msg_reply());
    assert_api_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    // Calls are allowed, but cycles cannot be attached in a query context.
    assert_api_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_not_supported(api.ic0_call_cycles_add(0));
    assert_api_supported(api.ic0_call_perform());
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_available());
    assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_accept(0));
    assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    // A certificate was provided to the ApiType (Some(vec![1])), so the
    // certificate-reading APIs are available here, unlike other contexts.
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_supported(api.ic0_data_certificate_size());
    assert_api_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
#[test]
fn test_reply_api_support_on_nns() {
let cycles_account_manager = CyclesAccountManagerBuilder::new()
.with_subnet_type(SubnetType::System)
.build();
let api_type = ApiTypeBuilder::new().build_reply_api(Cycles::from(0));
let mut api = get_system_api(api_type, &get_cmc_system_state(), cycles_account_manager);
assert_api_not_supported(api.ic0_msg_caller_size());
assert_api_not_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
assert_api_supported(api.ic0_msg_arg_data_size());
assert_api_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
assert_api_not_supported(api.ic0_msg_method_name_size());
assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
assert_api_not_supported(api.ic0_accept_message());
assert_api_supported(api.ic0_msg_reply());
assert_api_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
assert_api_supported(api.ic0_msg_reject(0, 0, &[]));
assert_api_supported(api.ic0_msg_reject_code());
assert_api_not_supported(api.ic0_msg_reject_msg_size());
assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
assert_api_supported(api.ic0_canister_self_size());
assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
assert_api_supported(api.ic0_controller_size());
assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
assert_api_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
assert_api_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
assert_api_supported(api.ic0_call_data_append(0, 0, &[]));
assert_api_supported(api.ic0_call_on_cleanup(0, 0));
assert_api_supported(api.ic0_call_cycles_add(0));
assert_api_supported(api.ic0_call_perform());
assert_api_supported(api.ic0_stable_size());
assert_api_supported(api.ic0_stable_grow(1));
assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
assert_api_supported(api.ic0_stable64_size());
assert_api_supported(api.ic0_stable64_grow(1));
assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
assert_api_supported(api.ic0_time());
assert_api_supported(api.ic0_canister_cycle_balance());
assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
assert_api_supported(api.ic0_msg_cycles_available());
assert_api_supported(api.ic0_msg_cycles_available128(0, &mut []));
assert_api_supported(api.ic0_msg_cycles_refunded());
assert_api_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
assert_api_supported(api.ic0_msg_cycles_accept(0));
assert_api_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
assert_api_supported(api.ic0_data_certificate_present());
assert_api_not_supported(api.ic0_data_certificate_size());
assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
assert_api_supported(api.ic0_certified_data_set(0, 0, &[]));
assert_api_supported(api.ic0_canister_status());
assert_api_supported(api.ic0_mint_cycles(0));
}
// Checks API availability inside a reply callback on an Application (non-NNS)
// subnet with a plain (non-CMC) canister: same surface as the NNS variant
// above except that ic0.mint_cycles is rejected.
#[test]
fn test_reply_api_support_non_nns() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_subnet_type(SubnetType::Application)
        .build();
    let api_type = ApiTypeBuilder::new().build_reply_api(Cycles::from(0));
    let mut api = get_system_api(api_type, &get_system_state(), cycles_account_manager);
    assert_api_not_supported(api.ic0_msg_caller_size());
    assert_api_not_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_msg_arg_data_size());
    assert_api_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    assert_api_supported(api.ic0_msg_reply());
    assert_api_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_supported(api.ic0_call_cycles_add(0));
    assert_api_supported(api.ic0_call_perform());
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_available());
    assert_api_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_refunded());
    assert_api_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_accept(0));
    assert_api_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    // Non-NNS / non-CMC: minting is not permitted.
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
// Checks API availability inside a reject callback on a System (NNS) subnet
// with the CMC canister state: no incoming payload (arg-data APIs are off),
// the full reject-inspection family is on, and minting is permitted.
#[test]
fn test_reject_api_support_on_nns() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_subnet_type(SubnetType::System)
        .build();
    let api_type = ApiTypeBuilder::new().build_reject_api(RejectContext {
        code: RejectCode::CanisterReject,
        message: "error".to_string(),
    });
    let mut api = get_system_api(api_type, &get_cmc_system_state(), cycles_account_manager);
    assert_api_not_supported(api.ic0_msg_caller_size());
    assert_api_not_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    // A reject carries no reply payload, so arg-data is unavailable.
    assert_api_not_supported(api.ic0_msg_arg_data_size());
    assert_api_not_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    assert_api_supported(api.ic0_msg_reply());
    assert_api_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_msg_reject(0, 0, &[]));
    // The reject code and message from the RejectContext are readable.
    assert_api_supported(api.ic0_msg_reject_code());
    assert_api_supported(api.ic0_msg_reject_msg_size());
    assert_api_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_supported(api.ic0_call_cycles_add(0));
    assert_api_supported(api.ic0_call_perform());
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_available());
    assert_api_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_refunded());
    assert_api_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_accept(0));
    assert_api_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_supported(api.ic0_mint_cycles(0));
}
// Checks API availability inside a reject callback for a plain (non-CMC)
// canister: identical surface to the NNS variant above except that
// ic0.mint_cycles is rejected.
//
// NOTE(review): despite the "non_nns" name this builder still uses
// `SubnetType::System` (compare `SubnetType::Application` in
// `test_reply_api_support_non_nns`). The "non-NNS" behavior here appears to
// come from using `get_system_state()` instead of the CMC state — confirm
// whether `Application` was intended.
#[test]
fn test_reject_api_support_non_nns() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_subnet_type(SubnetType::System)
        .build();
    let api_type = ApiTypeBuilder::new().build_reject_api(RejectContext {
        code: RejectCode::CanisterReject,
        message: "error".to_string(),
    });
    let mut api = get_system_api(api_type, &get_system_state(), cycles_account_manager);
    assert_api_not_supported(api.ic0_msg_caller_size());
    assert_api_not_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_arg_data_size());
    assert_api_not_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    assert_api_supported(api.ic0_msg_reply());
    assert_api_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_supported(api.ic0_msg_reject_code());
    assert_api_supported(api.ic0_msg_reject_msg_size());
    assert_api_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_supported(api.ic0_call_cycles_add(0));
    assert_api_supported(api.ic0_call_perform());
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_available());
    assert_api_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_refunded());
    assert_api_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_supported(api.ic0_msg_cycles_accept(0));
    assert_api_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
}
#[test]
fn test_pre_upgrade_support() {
let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
let mut api = get_system_api(
ApiType::pre_upgrade(mock_time(), user_test_id(1).get()),
&get_system_state(),
cycles_account_manager,
);
assert_api_not_supported(api.ic0_msg_arg_data_size());
assert_api_not_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
assert_api_supported(api.ic0_msg_caller_size());
assert_api_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
assert_api_not_supported(api.ic0_msg_method_name_size());
assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
assert_api_not_supported(api.ic0_accept_message());
assert_api_not_supported(api.ic0_msg_reply());
assert_api_not_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
assert_api_not_supported(api.ic0_msg_reject(0, 0, &[]));
assert_api_not_supported(api.ic0_msg_reject_code());
assert_api_not_supported(api.ic0_msg_reject_msg_size());
assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
assert_api_supported(api.ic0_canister_self_size());
assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
assert_api_supported(api.ic0_controller_size());
assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
assert_api_not_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
assert_api_not_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
assert_api_not_supported(api.ic0_call_data_append(0, 0, &[]));
assert_api_not_supported(api.ic0_call_on_cleanup(0, 0));
assert_api_not_supported(api.ic0_call_cycles_add(0));
assert_api_not_supported(api.ic0_call_perform());
assert_api_supported(api.ic0_stable_size());
assert_api_supported(api.ic0_stable_grow(1));
assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
assert_api_supported(api.ic0_stable64_size());
assert_api_supported(api.ic0_stable64_grow(1));
assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
assert_api_supported(api.ic0_time());
assert_api_supported(api.ic0_canister_cycle_balance());
assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
assert_api_not_supported(api.ic0_msg_cycles_available());
assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
assert_api_not_supported(api.ic0_msg_cycles_refunded());
assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
assert_api_not_supported(api.ic0_msg_cycles_accept(0));
assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
assert_api_supported(api.ic0_data_certificate_present());
assert_api_not_supported(api.ic0_data_certificate_size());
assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
assert_api_supported(api.ic0_certified_data_set(0, 0, &[]));
assert_api_supported(api.ic0_canister_status());
assert_api_not_supported(api.ic0_mint_cycles(0));
}
// Availability matrix for the `start` entry point (`ApiType::start()`):
// the Wasm `start` function runs before any message handling, so every
// message-, call-, state- and balance-related syscall must be rejected.
// `ic0_data_certificate_present` is the lone supported call here.
#[test]
fn test_start_support() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut api = get_system_api(
        ApiType::start(),
        &get_system_state(),
        cycles_account_manager,
    );
    // Message introspection: there is no incoming message in `start`.
    assert_api_not_supported(api.ic0_msg_arg_data_size());
    assert_api_not_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_caller_size());
    assert_api_not_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    // Reply / reject: nothing to respond to.
    assert_api_not_supported(api.ic0_msg_reply());
    assert_api_not_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    // Canister identity APIs are unavailable during `start`.
    assert_api_not_supported(api.ic0_canister_self_size());
    assert_api_not_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_controller_size());
    assert_api_not_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    // Outgoing calls cannot be made.
    assert_api_not_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_not_supported(api.ic0_call_cycles_add(0));
    assert_api_not_supported(api.ic0_call_perform());
    // Stable memory (32- and 64-bit) is not accessible.
    assert_api_not_supported(api.ic0_stable_size());
    assert_api_not_supported(api.ic0_stable_grow(1));
    assert_api_not_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_stable64_size());
    assert_api_not_supported(api.ic0_stable64_grow(1));
    assert_api_not_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    // Time, balances and message-cycles APIs are all unavailable.
    assert_api_not_supported(api.ic0_time());
    assert_api_not_supported(api.ic0_canister_cycle_balance());
    assert_api_not_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_available());
    assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_accept(0));
    assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    // Certification: only the presence check is allowed.
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_not_supported(api.ic0_canister_status());
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
// Availability matrix for `ApiType::Cleanup`: no message or outgoing-call
// APIs are available, but read access to canister identity, stable
// memory, time, balance and status remains; certified data cannot be set.
#[test]
fn test_cleanup_support() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut api = get_system_api(
        ApiType::Cleanup { time: mock_time() },
        &get_system_state(),
        cycles_account_manager,
    );
    // No incoming message to introspect, accept, reply to or reject.
    assert_api_not_supported(api.ic0_msg_caller_size());
    assert_api_not_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_arg_data_size());
    assert_api_not_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_reply());
    assert_api_not_supported(api.ic0_accept_message());
    assert_api_not_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    // Canister identity is readable.
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    // No outgoing calls from a cleanup callback.
    assert_api_not_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_not_supported(api.ic0_call_cycles_add(0));
    assert_api_not_supported(api.ic0_call_perform());
    // Stable memory is fully accessible.
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    // Time and balances can be read; message-cycles APIs cannot.
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_available());
    assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_accept(0));
    assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    // Certification: only the presence check; no reads or writes.
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
// Availability matrix for `inspect_message`: the canister may inspect the
// pending ingress (caller, method name, payload) and call
// `ic0_accept_message`, plus use read-only state APIs; it cannot reply,
// make outgoing calls, move cycles, or write certified data.
#[test]
fn test_inspect_message_support() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut api = get_system_api(
        ApiType::inspect_message(
            user_test_id(1).get(),
            "hello".to_string(),
            vec![],
            mock_time(),
        ),
        &get_system_state(),
        cycles_account_manager,
    );
    // The pending ingress message can be fully inspected and accepted.
    assert_api_supported(api.ic0_msg_caller_size());
    assert_api_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_msg_arg_data_size());
    assert_api_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_msg_method_name_size());
    assert_api_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_accept_message());
    // No replies or rejects are produced during inspection.
    assert_api_not_supported(api.ic0_msg_reply());
    assert_api_not_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    // Canister identity is readable.
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    // No outgoing calls during inspection.
    assert_api_not_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_not_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_not_supported(api.ic0_call_cycles_add(0));
    assert_api_not_supported(api.ic0_call_perform());
    // Stable memory is fully accessible.
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    // Time and balances can be read; message-cycles APIs cannot.
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_available());
    assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_accept(0));
    assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    // Certification: only the presence check; no reads or writes.
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
// Availability matrix for the `heartbeat` execution context: there is no
// incoming message, so message/reply/reject and message-cycles APIs are
// rejected, while identity, outgoing calls, stable memory, time, balances,
// certified-data writes and canister status are all available.
#[test]
fn test_canister_heartbeat_support() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut api = get_system_api(
        ApiTypeBuilder::new().build_heartbeat_api(),
        &get_system_state(),
        cycles_account_manager,
    );
    // No incoming message to introspect, accept, reply to or reject.
    assert_api_not_supported(api.ic0_msg_caller_size());
    assert_api_not_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_arg_data_size());
    assert_api_not_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    assert_api_not_supported(api.ic0_msg_reply());
    assert_api_not_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    // Canister identity is readable.
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    // Heartbeats may make outgoing calls.
    assert_api_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_supported(api.ic0_call_cycles_add(0));
    assert_api_supported(api.ic0_call_perform());
    // Stable memory is fully accessible. The stable64 checks are listed in
    // size/grow/read/write order to match the other support tests.
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    // Time and balances can be read; message-cycles APIs cannot.
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_available());
    assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_accept(0));
    assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    // Certification: presence check and writing certified data are allowed;
    // reading the certificate is not.
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    assert_api_not_supported(api.ic0_mint_cycles(0));
}
// Same matrix as `test_canister_heartbeat_support`, but on a system subnet
// using the CMC's system state (`get_cmc_system_state`): the only
// difference in the expectations is that `ic0_mint_cycles` is supported.
#[test]
fn test_canister_heartbeat_support_nns() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_subnet_type(SubnetType::System)
        .build();
    let api_type = ApiTypeBuilder::new().build_heartbeat_api();
    let mut api = get_system_api(api_type, &get_cmc_system_state(), cycles_account_manager);
    // No incoming message to introspect, accept, reply to or reject.
    assert_api_not_supported(api.ic0_msg_caller_size());
    assert_api_not_supported(api.ic0_msg_caller_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_arg_data_size());
    assert_api_not_supported(api.ic0_msg_arg_data_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_msg_method_name_size());
    assert_api_not_supported(api.ic0_msg_method_name_copy(0, 0, 0, &mut []));
    assert_api_not_supported(api.ic0_accept_message());
    assert_api_not_supported(api.ic0_msg_reply());
    assert_api_not_supported(api.ic0_msg_reply_data_append(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject(0, 0, &[]));
    assert_api_not_supported(api.ic0_msg_reject_code());
    assert_api_not_supported(api.ic0_msg_reject_msg_size());
    assert_api_not_supported(api.ic0_msg_reject_msg_copy(0, 0, 0, &mut []));
    // Canister identity is readable.
    assert_api_supported(api.ic0_canister_self_size());
    assert_api_supported(api.ic0_canister_self_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_controller_size());
    assert_api_supported(api.ic0_controller_copy(0, 0, 0, &mut []));
    // Heartbeats may make outgoing calls.
    assert_api_supported(api.ic0_call_simple(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]));
    assert_api_supported(api.ic0_call_data_append(0, 0, &[]));
    assert_api_supported(api.ic0_call_on_cleanup(0, 0));
    assert_api_supported(api.ic0_call_cycles_add(0));
    assert_api_supported(api.ic0_call_perform());
    // Stable memory is fully accessible.
    assert_api_supported(api.ic0_stable_size());
    assert_api_supported(api.ic0_stable_grow(1));
    assert_api_supported(api.ic0_stable_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable_write(0, 0, 0, &[]));
    assert_api_supported(api.ic0_stable64_size());
    assert_api_supported(api.ic0_stable64_grow(1));
    assert_api_supported(api.ic0_stable64_read(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_stable64_write(0, 0, 0, &[]));
    // Time and balances can be read; message-cycles APIs cannot.
    assert_api_supported(api.ic0_time());
    assert_api_supported(api.ic0_canister_cycle_balance());
    assert_api_supported(api.ic0_canister_cycles_balance128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_available());
    assert_api_not_supported(api.ic0_msg_cycles_available128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_refunded());
    assert_api_not_supported(api.ic0_msg_cycles_refunded128(0, &mut []));
    assert_api_not_supported(api.ic0_msg_cycles_accept(0));
    assert_api_not_supported(api.ic0_msg_cycles_accept128(Cycles::zero(), 0, &mut []));
    // Certification: presence check and writing certified data are allowed.
    assert_api_supported(api.ic0_data_certificate_present());
    assert_api_not_supported(api.ic0_data_certificate_size());
    assert_api_not_supported(api.ic0_data_certificate_copy(0, 0, 0, &mut []));
    assert_api_supported(api.ic0_certified_data_set(0, 0, &[]));
    assert_api_supported(api.ic0_canister_status());
    // Unlike ordinary canisters, the CMC may mint cycles.
    assert_api_supported(api.ic0_mint_cycles(0));
}
// Starting a new call via `ic0_call_new` discards any call already under
// construction and refunds the cycles that had been attached to it with
// `ic0_call_cycles_add128`.
#[test]
fn test_discard_cycles_charge_by_new_call() {
    let initial_balance = Cycles::from(1_000_000_000_000u128);
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_max_num_instructions(NumInstructions::from(1 << 30))
        .build();
    let mut api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &get_system_state_with_cycles(initial_balance),
        cycles_account_manager,
    );
    // Reads the balance as observed through the system API.
    let balance_of =
        |api: &SystemApiImpl| Cycles::from(api.ic0_canister_cycle_balance().unwrap());

    // Start building a call; the balance is untouched so far.
    api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]).unwrap();
    assert_eq!(balance_of(&api), initial_balance);

    // Attaching cycles to the pending call reduces the visible balance.
    let attached = Cycles::from(49);
    api.ic0_call_cycles_add128(attached).unwrap();
    assert_eq!(balance_of(&api), initial_balance - attached);

    // A second `call_new` drops the pending call and refunds its cycles.
    api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]).unwrap();
    assert_eq!(balance_of(&api), initial_balance);
}
// Attaching more cycles than the canister owns must fail with
// `InsufficientCyclesBalance` and leave the balance untouched.
#[test]
fn test_fail_add_cycles_when_not_enough_balance() {
    let balance = Cycles::from(1_000_000_000_000u128);
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_max_num_instructions(NumInstructions::from(1 << 30))
        .build();
    let system_state = get_system_state_with_cycles(balance);
    let canister_id = system_state.canister_id();
    let mut api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &system_state,
        cycles_account_manager,
    );
    // Begin a fresh call; nothing has been charged yet.
    api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]).unwrap();
    assert_eq!(
        Cycles::from(api.ic0_canister_cycle_balance().unwrap()),
        balance
    );
    // Ask for one cycle more than is available.
    let requested = balance + Cycles::from(1);
    assert_eq!(
        api.ic0_call_cycles_add128(requested).unwrap_err(),
        HypervisorError::InsufficientCyclesBalance(CanisterOutOfCyclesError {
            canister_id,
            available: balance,
            threshold: Cycles::from(0),
            requested,
        })
    );
    // The failed attach must not have deducted anything.
    assert_eq!(
        Cycles::from(api.ic0_canister_cycle_balance().unwrap()),
        balance
    );
}
// After attaching just over half of the balance once, attaching the same
// amount again must fail, because the remaining balance no longer covers
// it, and the balance must stay unchanged after the failure.
#[test]
fn test_fail_adding_more_cycles_when_not_enough_balance() {
    let balance: u128 = 1_000_000_000_000;
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_max_num_instructions(NumInstructions::from(1 << 30))
        .build();
    let system_state = get_system_state_with_cycles(Cycles::from(balance));
    let canister_id = system_state.canister_id();
    let mut api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &system_state,
        cycles_account_manager,
    );
    // Begin a call; nothing has been charged yet.
    api.ic0_call_new(0, 0, 0, 0, 0, 0, 0, 0, &[]).unwrap();
    assert_eq!(api.ic0_canister_cycle_balance().unwrap() as u128, balance);
    // The first attach of just over half the balance succeeds.
    let half_plus_one = balance / 2 + 1;
    api.ic0_call_cycles_add128(half_plus_one.try_into().unwrap())
        .unwrap();
    assert_eq!(
        api.ic0_canister_cycle_balance().unwrap() as u128,
        balance - half_plus_one
    );
    // A second attach of the same amount exceeds what is left.
    assert_eq!(
        api.ic0_call_cycles_add128(half_plus_one.try_into().unwrap())
            .unwrap_err(),
        HypervisorError::InsufficientCyclesBalance(CanisterOutOfCyclesError {
            canister_id,
            available: Cycles::from(balance - half_plus_one),
            threshold: Cycles::from(0),
            requested: Cycles::from(half_plus_one),
        })
    );
    // The failed attach leaves the balance where it was.
    assert_eq!(
        api.ic0_canister_cycle_balance().unwrap() as u128,
        balance - half_plus_one
    );
}
// The 64-bit balance getter returns the exact balance when it fits in a
// u64; an open call context does not change the reported amount.
#[test]
fn test_canister_balance() {
    let balance: u64 = 100;
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_max_num_instructions(NumInstructions::from(1 << 30))
        .build();
    let mut system_state = get_system_state_with_cycles(Cycles::from(balance));
    // Open a call context carrying some cycles of its own.
    system_state
        .call_context_manager_mut()
        .unwrap()
        .new_call_context(
            CallOrigin::CanisterUpdate(canister_test_id(33), CallbackId::from(5)),
            Cycles::from(50),
            Time::from_nanos_since_unix_epoch(0),
        );
    let api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &system_state,
        cycles_account_manager,
    );
    // The reported balance is exactly the canister's own cycles.
    assert_eq!(api.ic0_canister_cycle_balance().unwrap(), balance);
}
// When the balance exceeds u64::MAX the 64-bit getter traps with
// `CyclesAmountTooBigFor64Bit`; the 128-bit variant writes the full
// amount into the heap as 16 little-endian bytes.
#[test]
fn test_canister_cycle_balance() {
    let huge_balance = Cycles::from(123456789012345678901234567890u128);
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_max_num_instructions(NumInstructions::from(1 << 30))
        .build();
    let mut system_state = get_system_state_with_cycles(huge_balance);
    system_state
        .call_context_manager_mut()
        .unwrap()
        .new_call_context(
            CallOrigin::CanisterUpdate(canister_test_id(33), CallbackId::from(5)),
            Cycles::from(50),
            Time::from_nanos_since_unix_epoch(0),
        );
    let api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &system_state,
        cycles_account_manager,
    );
    // The 64-bit API cannot represent the amount and must trap.
    assert_eq!(
        api.ic0_canister_cycle_balance(),
        Err(HypervisorError::Trapped(
            TrapCode::CyclesAmountTooBigFor64Bit
        ))
    );
    // The 128-bit API reports the exact amount, little-endian.
    let mut heap = [0u8; 16];
    api.ic0_canister_cycles_balance128(0, &mut heap).unwrap();
    assert_eq!(heap, huge_balance.get().to_le_bytes());
}
// `ic0_msg_cycles_available` traps when the cycles attached to the call
// context exceed u64::MAX; the 128-bit variant reports them correctly.
#[test]
fn test_msg_cycles_available_traps() {
    let balance = Cycles::from(123456789012345678901234567890u128);
    let attached = Cycles::from(789012345678901234567890u128);
    let mut system_state = get_system_state_with_cycles(balance);
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    // Open a call context carrying more cycles than fit in 64 bits.
    system_state
        .call_context_manager_mut()
        .unwrap()
        .new_call_context(
            CallOrigin::CanisterUpdate(canister_test_id(33), CallbackId::from(5)),
            attached,
            Time::from_nanos_since_unix_epoch(0),
        );
    let api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &system_state,
        cycles_account_manager,
    );
    // The 64-bit getter must trap on an over-large amount.
    assert_eq!(
        api.ic0_msg_cycles_available(),
        Err(HypervisorError::Trapped(
            TrapCode::CyclesAmountTooBigFor64Bit
        ))
    );
    // The 128-bit getter writes the amount as 16 little-endian bytes.
    let mut heap = [0u8; 16];
    api.ic0_msg_cycles_available128(0, &mut heap).unwrap();
    assert_eq!(heap, attached.get().to_le_bytes());
}
// `ic0_msg_cycles_refunded` traps on a refund above u64::MAX; the 128-bit
// variant returns the full refunded amount.
#[test]
fn test_msg_cycles_refunded_traps() {
    let refunded = Cycles::from(789012345678901234567890u128);
    let system_state =
        get_system_state_with_cycles(Cycles::from(123456789012345678901234567890u128));
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let api = get_system_api(
        ApiTypeBuilder::new().build_reply_api(refunded),
        &system_state,
        cycles_account_manager,
    );
    // The 64-bit getter must trap on an over-large refund.
    assert_eq!(
        api.ic0_msg_cycles_refunded(),
        Err(HypervisorError::Trapped(
            TrapCode::CyclesAmountTooBigFor64Bit
        ))
    );
    // The 128-bit getter writes the refund as 16 little-endian bytes.
    let mut heap = [0u8; 16];
    api.ic0_msg_cycles_refunded128(0, &mut heap).unwrap();
    assert_eq!(heap, refunded.get().to_le_bytes());
}
// `ic0_certified_data_set` rejects payloads over 32 bytes and
// out-of-bounds source ranges; accepted data is carried into the system
// state when the accumulated changes are applied.
#[test]
fn certified_data_set() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut system_state = SystemStateBuilder::default().build();
    let mut api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &system_state,
        cycles_account_manager,
    );
    let heap = vec![10; 33];
    // Certified data is capped at 32 bytes.
    assert!(api.ic0_certified_data_set(0, 33, &heap).is_err());
    // A source range reaching past the heap is rejected.
    assert!(api.ic0_certified_data_set(30, 10, &heap).is_err());
    // A valid 32-byte payload is accepted...
    api.ic0_certified_data_set(0, 32, &heap).unwrap();
    // ...and lands in the system state once the changes are applied.
    api.into_system_state_changes()
        .apply_changes(&mut system_state);
    assert_eq!(system_state.certified_data, vec![10; 32])
}
// `ic0_data_certificate_copy` validates both source (certificate) and
// destination (heap) bounds, then copies the requested slice of the
// certificate into the heap.
#[test]
fn data_certificate_copy() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let system_state = SystemStateBuilder::default().build();
    let api = get_system_api(
        ApiType::replicated_query(
            mock_time(),
            vec![],
            user_test_id(1).get(),
            // A six-byte data certificate to copy from.
            Some(vec![1, 2, 3, 4, 5, 6]),
        ),
        &system_state,
        cycles_account_manager,
    );
    let mut dst = vec![0; 10];
    // Source ranges that overrun the 6-byte certificate are rejected.
    assert!(api.ic0_data_certificate_copy(0, 0, 10, &mut dst).is_err());
    assert!(api.ic0_data_certificate_copy(0, 10, 1, &mut dst).is_err());
    // Destination ranges that overrun the 10-byte heap are rejected.
    assert!(api.ic0_data_certificate_copy(10, 1, 1, &mut dst).is_err());
    assert!(api.ic0_data_certificate_copy(0, 1, 11, &mut dst).is_err());
    // A full copy lands at the start of the heap.
    api.ic0_data_certificate_copy(0, 0, 6, &mut dst).unwrap();
    assert_eq!(dst, vec![1, 2, 3, 4, 5, 6, 0, 0, 0, 0]);
    // A partial copy (certificate bytes 2..6) lands at the tail.
    api.ic0_data_certificate_copy(6, 2, 4, &mut dst).unwrap();
    assert_eq!(dst, vec![1, 2, 3, 4, 5, 6, 3, 4, 5, 6]);
}
// `ic0_canister_status` maps Running/Stopping/Stopped to 1/2/3.
#[test]
fn canister_status() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();

    // A running canister reports status code 1.
    let running_api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &get_system_state_with_cycles(INITIAL_CYCLES),
        cycles_account_manager,
    );
    assert_eq!(running_api.ic0_canister_status(), Ok(1));

    // A stopping canister reports status code 2.
    let stopping_state = SystemState::new_stopping(
        canister_test_id(42),
        user_test_id(24).get(),
        INITIAL_CYCLES,
        NumSeconds::from(100_000),
    );
    let stopping_api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &stopping_state,
        cycles_account_manager,
    );
    assert_eq!(stopping_api.ic0_canister_status(), Ok(2));

    // A stopped canister reports status code 3.
    let stopped_state = SystemState::new_stopped(
        canister_test_id(42),
        user_test_id(24).get(),
        INITIAL_CYCLES,
        NumSeconds::from(100_000),
    );
    let stopped_api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &stopped_state,
        cycles_account_manager,
    );
    assert_eq!(stopped_api.ic0_canister_status(), Ok(3));
}
/// `ic0_msg_cycles_accept` accepts the full requested amount when exactly
/// that many cycles are attached to the call context.
#[test]
fn msg_cycles_accept_all_cycles_in_call_context() {
    let attached = 50;
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut system_state = SystemStateBuilder::default().build();
    // Attach `attached` cycles to an incoming call context.
    system_state
        .call_context_manager_mut()
        .unwrap()
        .new_call_context(
            CallOrigin::CanisterUpdate(canister_test_id(33), CallbackId::from(5)),
            Cycles::from(attached),
            Time::from_nanos_since_unix_epoch(0),
        );
    let mut api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &system_state,
        cycles_account_manager,
    );
    // Asking for exactly what is available yields the full amount.
    assert_eq!(api.ic0_msg_cycles_accept(attached), Ok(attached));
}
/// `ic0_msg_cycles_accept` caps the accepted amount at whatever the call
/// context actually carries when more is requested.
#[test]
fn msg_cycles_accept_all_cycles_in_call_context_when_more_asked() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut system_state = SystemStateBuilder::default().build();
    // Only 40 cycles are attached to the incoming call.
    system_state
        .call_context_manager_mut()
        .unwrap()
        .new_call_context(
            CallOrigin::CanisterUpdate(canister_test_id(33), CallbackId::from(5)),
            Cycles::from(40),
            Time::from_nanos_since_unix_epoch(0),
        );
    let mut api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &system_state,
        cycles_account_manager,
    );
    // Requesting 50 returns only the 40 that were available.
    assert_eq!(api.ic0_msg_cycles_accept(50), Ok(40));
}
/// If `ic0_call_perform()` fails because the canister does not have enough
/// cycles to send the message, then the state is reset: nothing is
/// charged, no callback is registered, and no new call context appears.
#[test]
fn call_perform_not_enough_cycles_resets_state() {
    let cycles_account_manager = CyclesAccountManagerBuilder::new()
        .with_subnet_type(SubnetType::Application)
        .build();
    // Set initial cycles small enough so that it does not have enough
    // cycles to send xnet messages (10 cycles below the xnet fee).
    let initial_cycles = cycles_account_manager.xnet_call_performed_fee() - Cycles::from(10);
    let mut system_state = SystemStateBuilder::new()
        .initial_cycles(initial_cycles)
        .build();
    // Open one call context up front; it must survive the failed call.
    system_state
        .call_context_manager_mut()
        .unwrap()
        .new_call_context(
            CallOrigin::CanisterUpdate(canister_test_id(33), CallbackId::from(5)),
            Cycles::from(40),
            Time::from_nanos_since_unix_epoch(0),
        );
    let mut api = get_system_api(
        ApiTypeBuilder::new().build_update_api(),
        &system_state,
        cycles_account_manager,
    );
    // Build a call with a 1 KiB payload and attach some cycles to it.
    api.ic0_call_new(0, 10, 0, 10, 0, 0, 0, 0, &[0; 1024])
        .unwrap();
    api.ic0_call_cycles_add128(Cycles::from(100)).unwrap();
    // `call_perform` reports failure with a non-zero error code instead
    // of enqueueing the request.
    assert_eq!(api.ic0_call_perform().unwrap(), 2);
    let system_state_changes = api.into_system_state_changes();
    system_state_changes.apply_changes(&mut system_state);
    // Balance is intact, no callback was registered, and only the
    // pre-existing call context remains.
    assert_eq!(system_state.balance(), initial_cycles);
    let call_context_manager = system_state.call_context_manager().unwrap();
    assert_eq!(call_context_manager.call_contexts().len(), 1);
    assert_eq!(call_context_manager.callbacks().len(), 0);
}
// `ic0_stable_grow` must deduct grown pages from the subnet-wide
// available memory, and a failed grow must not consume any of it.
#[test]
fn stable_grow_updates_subnet_available_memory() {
    let wasm_page_size = 64 << 10;
    // The subnet only has room for two Wasm pages in total.
    let subnet_available_memory_bytes = 2 * wasm_page_size;
    let subnet_available_memory: SubnetAvailableMemory =
        AvailableMemory::new(subnet_available_memory_bytes, 0).into();
    let system_state = SystemStateBuilder::default().build();
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let sandbox_safe_system_state =
        SandboxSafeSystemState::new(&system_state, cycles_account_manager);
    let mut api = SystemApiImpl::new(
        ApiTypeBuilder::new().build_update_api(),
        sandbox_safe_system_state,
        CANISTER_CURRENT_MEMORY_USAGE,
        ExecutionParameters {
            // Keep a handle on the shared counter to observe it below.
            subnet_available_memory: subnet_available_memory.clone(),
            ..execution_parameters()
        },
        Memory::default(),
        Arc::new(DefaultOutOfInstructionsHandler {}),
        no_op_logger(),
    );
    // Growing by one page succeeds (returns old size 0) and consumes one
    // page of subnet memory.
    assert_eq!(api.ic0_stable_grow(1).unwrap(), 0);
    assert_eq!(subnet_available_memory.get_total_memory(), wasm_page_size);
    // Growing by ten pages exceeds the remaining one page: the grow fails
    // (-1) and the subnet counter must stay unchanged.
    assert_eq!(api.ic0_stable_grow(10).unwrap(), -1);
    assert_eq!(subnet_available_memory.get_total_memory(), wasm_page_size);
}
// When stable memory is already too large for the 32-bit API,
// `ic0_stable_grow` must trap without consuming subnet memory or
// changing the canister's recorded memory usage.
#[test]
fn stable_grow_returns_allocated_memory_on_error() {
    // Subnet with stable memory size above what can be represented on 32 bits.
    let wasm_page_size = 64 << 10;
    let subnet_available_memory_bytes = 2 * wasm_page_size;
    let subnet_available_memory: SubnetAvailableMemory =
        AvailableMemory::new(subnet_available_memory_bytes, 0).into();
    let system_state = SystemStateBuilder::default().build();
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let sandbox_safe_system_state =
        SandboxSafeSystemState::new(&system_state, cycles_account_manager);
    let mut api = SystemApiImpl::new(
        ApiTypeBuilder::new().build_update_api(),
        sandbox_safe_system_state,
        CANISTER_CURRENT_MEMORY_USAGE,
        ExecutionParameters {
            subnet_available_memory: subnet_available_memory.clone(),
            ..execution_parameters()
        },
        // Stable memory already holds 2^32 pages — past the 32-bit limit.
        Memory::new(PageMap::default(), NumWasmPages::new(1 << 32)),
        Arc::new(DefaultOutOfInstructionsHandler {}),
        no_op_logger(),
    );
    // Ensure that ic0_stable_grow() returns an error.
    assert_eq!(
        api.ic0_stable_grow(1),
        Err(HypervisorError::Trapped(
            TrapCode::StableMemoryTooBigFor32Bit
        ))
    );
    // Subnet available memory should be unchanged.
    assert_eq!(
        subnet_available_memory.get_total_memory(),
        subnet_available_memory_bytes
    );
    // As should the canister's current memory usage.
    assert_eq!(
        api.get_current_memory_usage(),
        CANISTER_CURRENT_MEMORY_USAGE
    );
}
// `update_available_memory` (Wasm heap growth) must deduct granted pages
// from the subnet counter, reject grows that do not fit, and report the
// grant as allocated (non-message) memory.
#[test]
fn update_available_memory_updates_subnet_available_memory() {
    let wasm_page_size = 64 << 10;
    // Room for exactly two Wasm pages on the subnet.
    let subnet_available_memory_bytes = 2 * wasm_page_size;
    let subnet_available_memory: SubnetAvailableMemory =
        AvailableMemory::new(subnet_available_memory_bytes, 0).into();
    let system_state = SystemStateBuilder::default().build();
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let sandbox_safe_system_state =
        SandboxSafeSystemState::new(&system_state, cycles_account_manager);
    let mut api = SystemApiImpl::new(
        ApiTypeBuilder::new().build_update_api(),
        sandbox_safe_system_state,
        CANISTER_CURRENT_MEMORY_USAGE,
        ExecutionParameters {
            // Keep a handle on the shared counter to observe it below.
            subnet_available_memory: subnet_available_memory.clone(),
            ..execution_parameters()
        },
        Memory::default(),
        Arc::new(DefaultOutOfInstructionsHandler {}),
        no_op_logger(),
    );
    // Growing by one page succeeds and consumes one page of the subnet.
    api.update_available_memory(0, 1).unwrap();
    assert_eq!(subnet_available_memory.get_total_memory(), wasm_page_size);
    // Growing by ten more pages cannot fit in the one remaining page; the
    // subnet counter must stay unchanged.
    api.update_available_memory(0, 10).unwrap_err();
    assert_eq!(subnet_available_memory.get_total_memory(), wasm_page_size);
    // Only the successful one-page grow counts as allocated memory, and
    // no message memory was involved.
    assert_eq!(api.get_allocated_memory().get() as i64, wasm_page_size);
    assert_eq!(api.get_allocated_message_memory().get() as i64, 0);
}
// When execution finishes with an error, `take_execution_result` must
// release all memory — both regular and message memory — that the run
// had allocated.
#[test]
fn take_execution_result_properly_frees_memory() {
    let subnet_available_memory: SubnetAvailableMemory =
        AvailableMemory::new(1 << 30, 1 << 30).into();
    let system_state = SystemStateBuilder::default().build();
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut sandbox_safe_system_state =
        SandboxSafeSystemState::new(&system_state, cycles_account_manager);
    let own_canister_id = system_state.canister_id;
    // Register a callback so a well-formed request can be pushed below.
    let callback_id = sandbox_safe_system_state
        .register_callback(Callback::new(
            call_context_test_id(0),
            Some(own_canister_id),
            Some(canister_test_id(0)),
            Cycles::from(0),
            WasmClosure::new(0, 0),
            WasmClosure::new(0, 0),
            None,
        ))
        .unwrap();
    let mut api = SystemApiImpl::new(
        ApiTypeBuilder::new().build_update_api(),
        sandbox_safe_system_state,
        CANISTER_CURRENT_MEMORY_USAGE,
        ExecutionParameters {
            subnet_available_memory,
            ..execution_parameters()
        },
        Memory::default(),
        Arc::new(DefaultOutOfInstructionsHandler {}),
        no_op_logger(),
    );
    // Allocate one Wasm page and enqueue one outgoing request so that
    // both regular and message memory are held.
    api.update_available_memory(0, 1).unwrap();
    let req = RequestBuilder::default()
        .sender(own_canister_id)
        .sender_reply_callback(callback_id)
        .build();
    assert_eq!(0, api.push_output_request(req).unwrap());
    assert!(api.get_allocated_memory().get() > 0);
    assert!(api.get_allocated_message_memory().get() > 0);
    // Finishing with an error surfaces that error...
    assert_eq!(
        api.take_execution_result(Some(&HypervisorError::OutOfMemory))
            .unwrap_err(),
        HypervisorError::OutOfMemory
    );
    // ...and frees everything the run had allocated.
    assert_eq!(api.get_allocated_memory().get(), 0);
    assert_eq!(api.get_allocated_message_memory().get(), 0);
}
#[test]
fn push_output_request_respects_memory_limits() {
    // Scenario: the binding memory budget is `MAX_RESPONSE_COUNT_BYTES + 13`,
    // so one request fits but a second one does not. The scenario is executed
    // twice (see the two `run_test` calls at the bottom): once with the total
    // subnet memory as the binding limit and once with the message memory.
    let run_test = |subnet_available_memory_bytes, subnet_available_message_memory_bytes| {
        let subnet_available_memory: SubnetAvailableMemory = AvailableMemory::new(
            subnet_available_memory_bytes,
            subnet_available_message_memory_bytes,
        )
        .into();
        let mut system_state = SystemStateBuilder::default().build();
        let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
        let mut sandbox_safe_system_state =
            SandboxSafeSystemState::new(&system_state, cycles_account_manager);
        let own_canister_id = system_state.canister_id;
        // The outgoing request must reference a registered callback.
        let callback_id = sandbox_safe_system_state
            .register_callback(Callback::new(
                call_context_test_id(0),
                Some(own_canister_id),
                Some(canister_test_id(0)),
                Cycles::from(0),
                WasmClosure::new(0, 0),
                WasmClosure::new(0, 0),
                None,
            ))
            .unwrap();
        let mut api = SystemApiImpl::new(
            ApiTypeBuilder::new().build_update_api(),
            sandbox_safe_system_state,
            CANISTER_CURRENT_MEMORY_USAGE,
            ExecutionParameters {
                subnet_available_memory: subnet_available_memory.clone(),
                ..execution_parameters()
            },
            Memory::default(),
            Arc::new(DefaultOutOfInstructionsHandler {}),
            no_op_logger(),
        );
        let req = RequestBuilder::default()
            .sender(own_canister_id)
            .sender_reply_callback(callback_id)
            .build();
        // First push succeeds with or without message memory usage accounting, as the
        // initial subnet available memory is `MAX_RESPONSE_COUNT_BYTES + 13`.
        assert_eq!(0, api.push_output_request(req.clone()).unwrap());
        if ENFORCE_MESSAGE_MEMORY_USAGE {
            // With message memory usage enabled, `MAX_RESPONSE_COUNT_BYTES` are consumed.
            assert_eq!(
                subnet_available_memory_bytes - MAX_RESPONSE_COUNT_BYTES as i64,
                subnet_available_memory.get_total_memory()
            );
            assert_eq!(
                subnet_available_message_memory_bytes - MAX_RESPONSE_COUNT_BYTES as i64,
                subnet_available_memory.get_message_memory()
            );
            assert_eq!(
                CANISTER_CURRENT_MEMORY_USAGE + NumBytes::from(MAX_RESPONSE_COUNT_BYTES as u64),
                api.get_current_memory_usage()
            );
            // And the second push fails (rejected as transient, not trapped).
            assert_eq!(
                RejectCode::SysTransient as i32,
                api.push_output_request(req).unwrap()
            );
            // Without altering memory usage.
            assert_eq!(
                subnet_available_memory_bytes - MAX_RESPONSE_COUNT_BYTES as i64,
                subnet_available_memory.get_total_memory()
            );
            assert_eq!(
                subnet_available_message_memory_bytes - MAX_RESPONSE_COUNT_BYTES as i64,
                subnet_available_memory.get_message_memory()
            );
            assert_eq!(
                CANISTER_CURRENT_MEMORY_USAGE + NumBytes::from(MAX_RESPONSE_COUNT_BYTES as u64),
                api.get_current_memory_usage()
            );
            assert_eq!(
                NumBytes::from(MAX_RESPONSE_COUNT_BYTES as u64),
                api.get_allocated_memory()
            );
            assert_eq!(
                NumBytes::from(MAX_RESPONSE_COUNT_BYTES as u64),
                api.get_allocated_message_memory()
            );
        } else {
            // With message memory usage disabled, any number of pushes will succeed, as the
            // memory usage is not affected.
            assert_eq!(
                subnet_available_memory_bytes,
                subnet_available_memory.get_total_memory()
            );
            assert_eq!(
                CANISTER_CURRENT_MEMORY_USAGE,
                api.get_current_memory_usage()
            );
        }
        // Ensure that exactly one output request was pushed.
        let system_state_changes = api.into_system_state_changes();
        system_state_changes.apply_changes(&mut system_state);
        assert_eq!(1, system_state.queues().output_queues_len());
    };
    run_test(MAX_RESPONSE_COUNT_BYTES as i64 + 13, 1 << 30);
    run_test(1 << 30, MAX_RESPONSE_COUNT_BYTES as i64 + 13);
}
#[test]
fn push_output_request_oversized_request_memory_limits() {
    // A request larger than the remaining memory must be rejected; an
    // oversized request that still fits must be accepted and charged its
    // actual byte size (rather than `MAX_RESPONSE_COUNT_BYTES`).
    let subnet_available_memory_bytes = 3 * MAX_RESPONSE_COUNT_BYTES as i64;
    let subnet_available_memory: SubnetAvailableMemory =
        AvailableMemory::new(subnet_available_memory_bytes, 1 << 30).into();
    let mut system_state = SystemStateBuilder::default().build();
    let cycles_account_manager = CyclesAccountManagerBuilder::new().build();
    let mut sandbox_safe_system_state =
        SandboxSafeSystemState::new(&system_state, cycles_account_manager);
    let own_canister_id = system_state.canister_id;
    let callback_id = sandbox_safe_system_state
        .register_callback(Callback::new(
            call_context_test_id(0),
            Some(own_canister_id),
            Some(canister_test_id(0)),
            Cycles::from(0),
            WasmClosure::new(0, 0),
            WasmClosure::new(0, 0),
            None,
        ))
        .unwrap();
    let mut api = SystemApiImpl::new(
        ApiTypeBuilder::new().build_update_api(),
        sandbox_safe_system_state,
        CANISTER_CURRENT_MEMORY_USAGE,
        ExecutionParameters {
            subnet_available_memory: subnet_available_memory.clone(),
            ..execution_parameters()
        },
        Memory::default(),
        Arc::new(DefaultOutOfInstructionsHandler {}),
        no_op_logger(),
    );
    // Oversized payload larger than available memory.
    let req = RequestBuilder::default()
        .sender(own_canister_id)
        .sender_reply_callback(callback_id)
        .method_payload(vec![13; 4 * MAX_RESPONSE_COUNT_BYTES])
        .build();
    if ENFORCE_MESSAGE_MEMORY_USAGE {
        // With message memory usage enabled, not enough memory to push the request.
        assert_eq!(
            RejectCode::SysTransient as i32,
            api.push_output_request(req).unwrap()
        );
        // Memory usage unchanged.
        assert_eq!(
            3 * MAX_RESPONSE_COUNT_BYTES as i64,
            subnet_available_memory.get_total_memory()
        );
        assert_eq!(
            CANISTER_CURRENT_MEMORY_USAGE,
            api.get_current_memory_usage()
        );
        // Slightly smaller, still oversized request.
        // NOTE(review): unlike the first request, this one sets no reply
        // callback — confirm that this is intentional.
        let req = RequestBuilder::default()
            .sender(own_canister_id)
            .method_payload(vec![13; 2 * MAX_RESPONSE_COUNT_BYTES])
            .build();
        let req_size_bytes = req.count_bytes();
        assert!(req_size_bytes > MAX_RESPONSE_COUNT_BYTES);
        // Pushing succeeds.
        assert_eq!(0, api.push_output_request(req).unwrap());
        // `req_size_bytes` are consumed.
        assert_eq!(
            (3 * MAX_RESPONSE_COUNT_BYTES - req_size_bytes) as i64,
            subnet_available_memory.get_total_memory()
        );
        assert_eq!(
            CANISTER_CURRENT_MEMORY_USAGE + NumBytes::from(req_size_bytes as u64),
            api.get_current_memory_usage()
        );
    } else {
        // With message memory usage disabled, push always succeeds.
        assert_eq!(0, api.push_output_request(req).unwrap());
        // And memory usage is not affected.
        assert_eq!(
            subnet_available_memory_bytes,
            subnet_available_memory.get_total_memory()
        );
        assert_eq!(
            CANISTER_CURRENT_MEMORY_USAGE,
            api.get_current_memory_usage()
        );
    }
    // Ensure that exactly one output request was pushed.
    let system_state_changes = api.into_system_state_changes();
    system_state_changes.apply_changes(&mut system_state);
    assert_eq!(1, system_state.queues().output_queues_len());
}
| 45.748565 | 96 | 0.710831 |
1c556b9517a6324e8e0fcf4a8a6be25ea71a0f15 | 930 | use crate::syntax::{ast::node::FunctionDecl, parser::tests::check_parser};
use boa_interner::Interner;
/// Checks that a plain function declaration parses into the expected
/// `FunctionDecl` node.
#[test]
fn function_declaration() {
    let mut interner = Interner::default();
    // Intern the expected function name before building the expected AST.
    let hello = interner.get_or_intern_static("hello");
    check_parser(
        "function hello() {}",
        vec![FunctionDecl::new(hello, vec![], vec![]).into()],
        &mut interner,
    );
}
/// Checks that function declarations whose names are contextual keywords
/// (`yield`, `await`) still parse into `FunctionDecl` nodes.
#[test]
fn function_declaration_keywords() {
    for &(source, name) in &[
        ("function yield() {}", "yield"),
        ("function await() {}", "await"),
    ] {
        // A fresh interner per case, mirroring independent parses.
        let mut interner = Interner::default();
        check_parser(
            source,
            vec![FunctionDecl::new(interner.get_or_intern_static(name), vec![], vec![]).into()],
            &mut interner,
        );
    }
}
64bc0f35ef0346c89dff27be400d688e14e40bc4 | 3,370 | use std::rc::Rc;
use crate::tests::*;
use crate::FnCache;
use crate::VecCache;
#[test]
fn cache_fn_ptr() {
    // Cache backed by the free function `square`.
    let mut cache = VecCache::new(square);
    assert_eq!(cache.cache.len(), 0);
    // The first lookup computes and stores the value...
    assert_eq!(cache.get(0), &0);
    assert_eq!(cache.cache.len(), 1);
    // ...and a repeated lookup is served without growing the cache.
    assert_eq!(cache.get(0), &0);
    assert_eq!(cache.cache.len(), 1);
    // Fetching index 5 fills every slot up to and including it.
    assert_eq!(cache.get(5), &25);
    assert_eq!(cache.cache.len(), 6);
    // Earlier indices are now hits too.
    assert_eq!(cache.get(5), &25);
    assert_eq!(cache.get(3), &9);
    assert_eq!(cache.cache.len(), 6);
}
#[test]
fn cache_closure() {
    // Cache backed by a non-capturing closure computing squares.
    let mut cache = VecCache::<u64>::new(|_store, n| *n as u64 * *n as u64);
    assert_eq!(cache.cache.len(), 0);
    // First lookup fills slot 0; a second lookup is a cache hit.
    assert_eq!(cache.get(0), &0);
    assert_eq!(cache.cache.len(), 1);
    assert_eq!(cache.get(0), &0);
    assert_eq!(cache.cache.len(), 1);
    // Fetching index 5 fills all six slots up to it.
    assert_eq!(cache.get(5), &25);
    assert_eq!(cache.cache.len(), 6);
    assert_eq!(cache.get(5), &25);
    assert_eq!(cache.get(3), &9);
    assert_eq!(cache.cache.len(), 6);
}
#[test]
fn cache_closure_capture() {
    // The closure captures `factor` from the environment.
    let factor = 3;
    let mut cache = VecCache::<u64>::new(|_store, n| factor * *n as u64 * *n as u64);
    assert_eq!(cache.cache.len(), 0);
    assert_eq!(cache.get(0), &0);
    assert_eq!(cache.cache.len(), 1);
    assert_eq!(cache.get(0), &0);
    assert_eq!(cache.cache.len(), 1);
    // 3 * 5 * 5 == 75; fetching index 5 fills six slots.
    assert_eq!(cache.get(5), &75);
    assert_eq!(cache.cache.len(), 6);
    assert_eq!(cache.get(5), &75);
    assert_eq!(cache.get(3), &27);
    assert_eq!(cache.cache.len(), 6);
}
#[test]
fn cache_fn_ptr_recursive() {
    // Cache backed by the recursive free function `fib`.
    let mut cache = VecCache::new(fib);
    assert_eq!(cache.cache.len(), 0);
    assert_eq!(cache.get(0), &0);
    assert_eq!(cache.cache.len(), 1);
    assert_eq!(cache.get(0), &0);
    assert_eq!(cache.cache.len(), 1);
    // fib(5) == 5; the recursion memoizes all six intermediate values.
    assert_eq!(cache.get(5), &5);
    assert_eq!(cache.cache.len(), 6);
    assert_eq!(cache.get(5), &5);
    assert_eq!(cache.get(3), &2);
    assert_eq!(cache.cache.len(), 6);
}
#[test]
fn cache_closure_recursive() {
    // Fibonacci defined recursively through the cache handle that the
    // closure receives as its first argument.
    let mut fib = VecCache::<u64>::new(|memo, n| match n {
        0 => 0,
        1 => 1,
        _ => *memo.get(n - 1) + *memo.get(n - 2),
    });
    assert_eq!(fib.cache.len(), 0);
    assert_eq!(fib.get(0), &0);
    assert_eq!(fib.cache.len(), 1);
    assert_eq!(fib.get(0), &0);
    assert_eq!(fib.cache.len(), 1);
    // fib(5) == 5; all intermediate values are memoized on the way.
    assert_eq!(fib.get(5), &5);
    assert_eq!(fib.cache.len(), 6);
    assert_eq!(fib.get(5), &5);
    assert_eq!(fib.get(3), &2);
    assert_eq!(fib.cache.len(), 6);
}
#[test]
fn cache_alternate_cache() {
    // Same Fibonacci scheme as the test above, but with `Rc`-wrapped values to
    // exercise the cache with a non-`Copy` value type.
    let mut vc = VecCache::<Rc<u64>>::new(|cache, x|
        Rc::new(match x {
            0 => 0,
            1 => 1,
            // The stored values are `Rc<u64>` (not `Copy`), so each recursive
            // lookup is cloned out of the cache before being dereferenced.
            _ => *cache.get(x - 1).clone() + *cache.get(x - 2).clone(),
        })
    );
    assert_eq!(vc.cache.len(), 0);
    assert_eq!(*vc.get(0).clone(), 0);
    assert_eq!(vc.cache.len(), 1);
    assert_eq!(*vc.get(0).clone(), 0);
    assert_eq!(vc.cache.len(), 1);
    assert_eq!(*vc.get(5).clone(), 5);
    assert_eq!(vc.cache.len(), 6);
    assert_eq!(*vc.get(5).clone(), 5);
    assert_eq!(*vc.get(3).clone(), 2);
    assert_eq!(vc.cache.len(), 6);
}
#[test]
fn clear() {
    // Identity function; only the bookkeeping matters in this test.
    let mut cache = VecCache::<usize>::new(|_store, n| *n);
    cache.get(2);
    assert_eq!(cache.cache.len(), 3);
    // Clearing drops every memoized entry.
    cache.clear();
    assert_eq!(cache.cache.len(), 0);
}
#[test]
fn len() {
    // `len` reports the number of memoized entries.
    let mut cache = VecCache::<usize>::new(|_store, n| *n);
    cache.get(0);
    cache.get(1);
    cache.get(2);
    assert_eq!(cache.len(), 3);
}
#[test]
fn reserve() {
    let mut cache = VecCache::<usize>::new(|_store, n| *n);
    cache.get(0);
    cache.get(1);
    cache.get(2);
    // After shrinking to fit, `reserve(extra)` must guarantee capacity for at
    // least `extra` additional entries.
    for extra in 20..60 {
        cache.cache.shrink_to_fit();
        cache.reserve(extra);
        assert!(
            cache.len() + extra <= cache.cache.capacity(),
            "len = {}, capacity = {}, additional = {}",
            cache.len(),
            cache.cache.capacity(),
            extra
        );
    }
}
| 19.593023 | 74 | 0.582196 |
ab708df3e885eeb7dc7b528930cb0e12f8399d7d | 9,833 | #![allow(dead_code)]
use std::ffi::CStr;
use std::mem;
use std::os::raw::c_void;
use std::ptr;
use cgmath::{perspective, vec3, Deg, EuclideanSpace, InnerSpace, Matrix4, Point3, SquareMatrix, Vector3};
use gl::types::*;
use glfw::Context;
use crate::c_str;
use crate::shared::{load_texture, process_events, process_input, Camera, Shader};
// settings
// Initial framebuffer dimensions (in pixels) of the GLFW window.
const SCR_WIDTH: u32 = 480;
const SCR_HEIGHT: u32 = 320;
/// Light-casters demo: renders ten textured cubes lit by a spotlight that
/// follows the camera (its position/direction and cut-off angles are uploaded
/// as `light.*` uniforms every frame).
pub fn main_2_5_4() {
    let mut camera = Camera { position: Point3::new(0.0, 0.0, 3.0), ..Camera::default() };
    // Mouse-look state: start centred and ignore the first delta.
    let mut first_mouse = true;
    let mut last_x: f32 = SCR_WIDTH as f32 / 2.0;
    let mut last_y: f32 = SCR_HEIGHT as f32 / 2.0;
    // Time between the previous two frames; passed to `process_input`.
    let mut delta_time: f32;
    let mut last_frame: f32 = 0.0;
    // glfw: initialize and configure
    // ------------------------------
    let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
    glfw.window_hint(glfw::WindowHint::ContextVersion(3, 3));
    glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));
    #[cfg(target_os = "macos")]
    glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));
    // glfw window creation
    // --------------------
    let (mut window, events) = glfw
        .create_window(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", glfw::WindowMode::Windowed)
        .expect("Failed to create GLFW window");
    window.make_current();
    window.set_key_polling(true);
    window.set_framebuffer_size_polling(true);
    window.set_cursor_pos_polling(true);
    window.set_scroll_polling(true);
    // tell GLFW to capture our mouse
    window.set_cursor_mode(glfw::CursorMode::Disabled);
    // gl: load all OpenGL function pointers
    // ---------------------------------------
    gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);
    let (light_shader, vbo, cube_vao, light_vao, diffuse_map, specular_map, cube_pos) = unsafe {
        // configure global opengl state
        gl::Enable(gl::DEPTH_TEST);
        // build and compile our shader program
        // ------------------------------------
        let light_shader = Shader::new(
            "src/tutorial/_2_lighting/shaders/5.2.light_casters.vsh",
            "src/tutorial/_2_lighting/shaders/5.2.light_casters.fsh",
        );
        // setup vertex data
        // -----------------
        // 36 vertices (6 faces x 2 triangles x 3 vertices), 8 floats each:
        // position (3), normal (3), texture coords (2).
        let vertices: [f32; 288] = [
            // positions // normals // texture coords
            -0.5, -0.5, -0.5, 0.0, 0.0, -1.0, 0.0, 0.0, 0.5, -0.5, -0.5, 0.0, 0.0, -1.0, 1.0, 0.0, 0.5, 0.5, -0.5, 0.0,
            0.0, -1.0, 1.0, 1.0, 0.5, 0.5, -0.5, 0.0, 0.0, -1.0, 1.0, 1.0, -0.5, 0.5, -0.5, 0.0, 0.0, -1.0, 0.0, 1.0,
            -0.5, -0.5, -0.5, 0.0, 0.0, -1.0, 0.0, 0.0, //
            -0.5, -0.5, 0.5, 0.0, 0.0, 1.0, 0.0, 0.0, 0.5, -0.5, 0.5, 0.0, 0.0, 1.0, 1.0, 0.0, 0.5, 0.5, 0.5, 0.0, 0.0,
            1.0, 1.0, 1.0, 0.5, 0.5, 0.5, 0.0, 0.0, 1.0, 1.0, 1.0, -0.5, 0.5, 0.5, 0.0, 0.0, 1.0, 0.0, 1.0, -0.5, -0.5,
            0.5, 0.0, 0.0, 1.0, 0.0, 0.0, //
            -0.5, 0.5, 0.5, -1.0, 0.0, 0.0, 1.0, 0.0, -0.5, 0.5, -0.5, -1.0, 0.0, 0.0, 1.0, 1.0, -0.5, -0.5, -0.5,
            -1.0, 0.0, 0.0, 0.0, 1.0, -0.5, -0.5, -0.5, -1.0, 0.0, 0.0, 0.0, 1.0, -0.5, -0.5, 0.5, -1.0, 0.0, 0.0, 0.0,
            0.0, -0.5, 0.5, 0.5, -1.0, 0.0, 0.0, 1.0, 0.0, //
            0.5, 0.5, 0.5, 1.0, 0.0, 0.0, 1.0, 0.0, 0.5, 0.5, -0.5, 1.0, 0.0, 0.0, 1.0, 1.0, 0.5, -0.5, -0.5, 1.0, 0.0,
            0.0, 0.0, 1.0, 0.5, -0.5, -0.5, 1.0, 0.0, 0.0, 0.0, 1.0, 0.5, -0.5, 0.5, 1.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.5,
            0.5, 1.0, 0.0, 0.0, 1.0, 0.0, //
            -0.5, -0.5, -0.5, 0.0, -1.0, 0.0, 0.0, 1.0, 0.5, -0.5, -0.5, 0.0, -1.0, 0.0, 1.0, 1.0, 0.5, -0.5, 0.5, 0.0,
            -1.0, 0.0, 1.0, 0.0, 0.5, -0.5, 0.5, 0.0, -1.0, 0.0, 1.0, 0.0, -0.5, -0.5, 0.5, 0.0, -1.0, 0.0, 0.0, 0.0,
            -0.5, -0.5, -0.5, 0.0, -1.0, 0.0, 0.0, 1.0, //
            -0.5, 0.5, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0, 0.5, 0.5, -0.5, 0.0, 1.0, 0.0, 1.0, 1.0, 0.5, 0.5, 0.5, 0.0, 1.0,
            0.0, 1.0, 0.0, 0.5, 0.5, 0.5, 0.0, 1.0, 0.0, 1.0, 0.0, -0.5, 0.5, 0.5, 0.0, 1.0, 0.0, 0.0, 0.0, -0.5, 0.5,
            -0.5, 0.0, 1.0, 0.0, 0.0, 1.0,
        ];
        // World-space positions of the ten cubes.
        let cube_pos: [Vector3<f32>; 10] = [
            vec3(0.0, 0.0, 0.0),
            vec3(2.0, 5.0, -15.0),
            vec3(-1.5, -2.2, -2.5),
            vec3(-3.8, -2.0, -12.3),
            vec3(2.4, -0.4, -3.5),
            vec3(-1.7, 3.0, -7.5),
            vec3(1.3, -2.0, -2.5),
            vec3(1.5, 2.0, -2.5),
            vec3(1.5, 0.2, -1.5),
            vec3(-1.3, 1.0, -1.5),
        ];
        let (mut vbo, mut cube_vao) = (0, 0);
        gl::GenVertexArrays(1, &mut cube_vao);
        gl::GenBuffers(1, &mut vbo);
        gl::BindVertexArray(cube_vao);
        gl::BindBuffer(gl::ARRAY_BUFFER, vbo);
        gl::BufferData(
            gl::ARRAY_BUFFER,
            (vertices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
            &vertices[0] as *const f32 as *const c_void,
            gl::STATIC_DRAW,
        );
        // 8 floats per vertex (see layout above).
        let stride = 8 * mem::size_of::<GLfloat>() as GLsizei;
        // position attribute
        gl::VertexAttribPointer(0, 3, gl::FLOAT, gl::FALSE, stride, ptr::null());
        gl::EnableVertexAttribArray(0);
        // normal attribute
        gl::VertexAttribPointer(1, 3, gl::FLOAT, gl::FALSE, stride, (3 * mem::size_of::<GLfloat>()) as *const c_void);
        gl::EnableVertexAttribArray(1);
        // texture coord
        gl::VertexAttribPointer(2, 2, gl::FLOAT, gl::FALSE, stride, (6 * mem::size_of::<GLfloat>()) as *const c_void);
        gl::EnableVertexAttribArray(2);
        // A second VAO sharing the same VBO; only positions are needed here.
        let mut light_vao = 0;
        gl::GenVertexArrays(1, &mut light_vao);
        gl::BindVertexArray(light_vao);
        gl::BindBuffer(gl::ARRAY_BUFFER, vbo);
        gl::VertexAttribPointer(0, 3, gl::FLOAT, gl::FALSE, stride, ptr::null());
        gl::EnableVertexAttribArray(0);
        // load textures
        // -------------
        let diffuse_map = load_texture("resources/textures/container2.png");
        let specular_map = load_texture("resources/textures/container2_specular.png");
        // shader configuration
        // --------------------
        light_shader.use_program();
        light_shader.set_int(c_str!("material.diffuse"), 0);
        light_shader.set_int(c_str!("material.specular"), 1);
        (light_shader, vbo, cube_vao, light_vao, diffuse_map, specular_map, cube_pos)
    };
    // render loop
    // -----------
    while !window.should_close() {
        // pre-frame time logic
        // --------------------
        let current_frame = glfw.get_time() as f32;
        delta_time = current_frame - last_frame;
        last_frame = current_frame;
        // events
        // -----
        process_events(&events, &mut first_mouse, &mut last_x, &mut last_y, &mut camera);
        // input
        // -----
        process_input(&mut window, delta_time, &mut camera);
        // render
        // ------
        unsafe {
            gl::ClearColor(0.1, 0.1, 0.1, 1.0);
            gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
            light_shader.use_program();
            // The spotlight is attached to the camera.
            light_shader.set_vector3(c_str!("light.position"), &camera.position.to_vec());
            light_shader.set_vector3(c_str!("light.direction"), &camera.front);
            light_shader.set_float(c_str!("light.cutOff"), 12.5f32.to_radians().cos());
            light_shader.set_float(c_str!("light.outerCutOff"), 17.5f32.to_radians().cos());
            light_shader.set_vector3(c_str!("viewPos"), &camera.position.to_vec());
            // light properties
            light_shader.set_vec3(c_str!("light.ambient"), 0.1, 0.1, 0.1);
            light_shader.set_vec3(c_str!("light.diffuse"), 0.8, 0.8, 0.8);
            light_shader.set_vec3(c_str!("light.specular"), 1.0, 1.0, 1.0);
            light_shader.set_float(c_str!("light.constant"), 1.0);
            light_shader.set_float(c_str!("light.linear"), 0.09);
            light_shader.set_float(c_str!("light.quadratic"), 0.032);
            // material properties
            light_shader.set_float(c_str!("material.shininess"), 32.0);
            // view/projection transformations
            let projection: Matrix4<f32> =
                perspective(Deg(camera.zoom), SCR_WIDTH as f32 / SCR_HEIGHT as f32, 0.1, 100.0);
            let view = camera.get_view_matrix();
            light_shader.set_mat4(c_str!("projection"), &projection);
            light_shader.set_mat4(c_str!("view"), &view);
            // world transformation
            let model = Matrix4::<f32>::identity();
            light_shader.set_mat4(c_str!("model"), &model);
            // bind diffuse map
            gl::ActiveTexture(gl::TEXTURE0);
            gl::BindTexture(gl::TEXTURE_2D, diffuse_map);
            // bind specular map
            gl::ActiveTexture(gl::TEXTURE1);
            gl::BindTexture(gl::TEXTURE_2D, specular_map);
            // render
            gl::BindVertexArray(cube_vao);
            for (i, position) in cube_pos.iter().enumerate() {
                // Each cube gets its own model matrix with a fixed rotation.
                let mut model: Matrix4<f32> = Matrix4::from_translation(*position);
                let angle = 20.0 * i as f32;
                model = model * Matrix4::from_axis_angle(vec3(1.0, 0.3, 0.5).normalize(), Deg(angle));
                light_shader.set_mat4(c_str!("model"), &model);
                gl::DrawArrays(gl::TRIANGLES, 0, 36);
            }
        }
        // glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
        // -------------------------------------------------------------------------------
        window.swap_buffers();
        glfw.poll_events();
    }
    // De-allocate GL resources once the window closes.
    unsafe {
        gl::DeleteVertexArrays(1, &cube_vao);
        gl::DeleteVertexArrays(1, &light_vao);
        gl::DeleteBuffers(1, &vbo);
    }
}
| 41.665254 | 119 | 0.522323 |
bbb87dc3a1fee6cd4c1004f92cf358bdbd37efa1 | 7,410 | // Copyright 2020-2021 The Datafuse Authors.
//
// SPDX-License-Identifier: Apache-2.0.
use std::collections::HashMap;
use common_flights::status_err;
use pretty_assertions::assert_eq;
use tonic::Code;
use crate::engine::mem_engine::MemEngine;
use crate::protobuf::CmdCreateDatabase;
use crate::protobuf::CmdCreateTable;
use crate::protobuf::Db;
use crate::protobuf::Table;
#[test]
fn test_mem_engine_create_database() -> anyhow::Result<()> {
    // Creating databases assigns consecutive ids; re-creating an existing one
    // only succeeds with `if_not_exists = true`.
    // TODO check generated ver
    let eng = MemEngine::create();
    let mut eng = eng.lock().unwrap();
    // Requested ids/versions are placeholders (-1); the engine assigns real ones.
    let cmdfoo = CmdCreateDatabase {
        db_name: "foo".into(),
        db: Some(Db {
            db_id: -1,
            ver: -1,
            table_name_to_id: HashMap::new(),
            tables: HashMap::new(),
        }),
    };
    let cmdbar = CmdCreateDatabase {
        db_name: "bar".into(),
        db: Some(Db {
            db_id: -1,
            ver: -1,
            table_name_to_id: HashMap::new(),
            tables: HashMap::new(),
        }),
    };
    {
        // create db foo
        let rst = eng.create_database(cmdfoo.clone(), false);
        assert_eq!(0, rst.unwrap());
        assert_eq!(
            Db {
                db_id: 0,
                ver: 0,
                table_name_to_id: HashMap::new(),
                tables: HashMap::new()
            },
            eng.get_database("foo".into()).unwrap()
        );
    }
    {
        // create db bar
        let rst = eng.create_database(cmdbar.clone(), false);
        assert_eq!(1, rst.unwrap());
        assert_eq!(
            Db {
                db_id: 1,
                ver: 1,
                table_name_to_id: HashMap::new(),
                tables: HashMap::new()
            },
            eng.get_database("bar".into()).unwrap()
        );
    }
    {
        // create db bar with if_not_exists=true
        // Succeeds and returns the existing database id unchanged.
        let rst = eng.create_database(cmdbar.clone(), true);
        assert_eq!(1, rst.unwrap());
        assert_eq!(
            Db {
                db_id: 1,
                ver: 1,
                table_name_to_id: HashMap::new(),
                tables: HashMap::new()
            },
            eng.get_database("bar".into()).unwrap()
        );
    }
    {
        // create db bar failure
        // Without the flag, the duplicate create fails and leaves state intact.
        let rst = eng.create_database(cmdbar.clone(), false);
        assert_eq!("bar database exists", format!("{}", rst.err().unwrap()));
        assert_eq!(
            Db {
                db_id: 1,
                ver: 1,
                table_name_to_id: HashMap::new(),
                tables: HashMap::new()
            },
            eng.get_database("bar".into()).unwrap(),
            "got the previous bar"
        );
    }
    Ok(())
}
#[test]
fn test_mem_engine_create_get_table() -> anyhow::Result<()> {
    // Creating a table inside a database assigns it an id, and lookups by
    // unknown database/table names fail with NotFound-style errors.
    // TODO check generated ver
    let eng = MemEngine::create();
    let mut eng = eng.lock().unwrap();
    let cmdfoo = CmdCreateDatabase {
        db_name: "foo".into(),
        db: Some(Db {
            db_id: -1,
            ver: -1,
            table_name_to_id: HashMap::new(),
            tables: HashMap::new(),
        }),
    };
    // Requested table id/version are placeholders (-1); the engine assigns them.
    let cmd_table = CmdCreateTable {
        db_name: "foo".into(),
        table_name: "t1".into(),
        table: Some(Table {
            table_id: -1,
            ver: -1,
            schema: vec![1, 2, 3],
            options: maplit::hashmap! {"key".into() => "val".into()},
            placement_policy: vec![1, 2, 3],
        }),
    };
    {
        // create db foo
        let rst = eng.create_database(cmdfoo.clone(), false);
        assert_eq!(0, rst.unwrap());
    }
    {
        // create table
        let rst = eng.create_table(cmd_table.clone(), false);
        assert_eq!(1, rst.unwrap());
        // get table t1
        let got = eng.get_table("foo".into(), "t1".into());
        assert!(got.is_ok());
        let got = got.unwrap();
        // Schema/options/placement are stored verbatim; id/ver were assigned.
        assert_eq!(
            Table {
                table_id: 1,
                ver: 1,
                schema: vec![1, 2, 3],
                options: maplit::hashmap! {"key".into() => "val".into()},
                placement_policy: vec![1, 2, 3]
            },
            got
        );
    }
    {
        // get table, db not found
        let got = eng.get_table("notfound".into(), "t1".into());
        assert!(got.is_err());
        assert_eq!(
            "status: Some requested entity was not found: database not found: notfound",
            status_err(got.err().unwrap()).to_string()
        );
    }
    {
        // get table, table not found
        let got = eng.get_table("foo".into(), "notfound".into());
        assert!(got.is_err());
        assert_eq!(
            "status: Some requested entity was not found: table not found: notfound",
            status_err(got.err().unwrap()).to_string()
        );
    }
    Ok(())
}
#[test]
fn test_mem_engine_drop_database() -> anyhow::Result<()> {
    let engine = MemEngine::create();
    let mut engine = engine.lock().unwrap();
    let db_name = "foo";
    // Create the database that the drop calls below operate on.
    let create = CmdCreateDatabase {
        db_name: db_name.to_string(),
        db: Some(Db {
            db_id: -1,
            ver: -1,
            table_name_to_id: HashMap::new(),
            tables: HashMap::new(),
        }),
    };
    let _ = engine.create_database(create.clone(), false).unwrap();
    // Dropping an existing database succeeds.
    assert!(engine.drop_database(db_name, false).is_ok());
    // With "IF EXISTS", dropping the already-removed database still succeeds.
    assert!(engine.drop_database(db_name, true).is_ok());
    // Without the flag, dropping a missing database reports NotFound.
    let result = engine.drop_database(db_name, false);
    assert!(result.is_err());
    assert_eq!(result.unwrap_err().code(), Code::NotFound);
    Ok(())
}
#[test]
fn test_mem_engine_drop_table() -> anyhow::Result<()> {
    // Dropping a table: "IF EXISTS" tolerates missing tables and even missing
    // databases, while the strict form reports NotFound for both.
    let eng = MemEngine::create();
    let test_db = "test_db";
    let test_tbl = "test_tbl";
    let mut eng = eng.lock().unwrap();
    let cmd_db = CmdCreateDatabase {
        db_name: test_db.to_string(),
        db: Some(Db {
            db_id: -1,
            ver: -1,
            table_name_to_id: HashMap::new(),
            tables: HashMap::new(),
        }),
    };
    let cmd_table = CmdCreateTable {
        db_name: test_db.to_string(),
        table_name: test_tbl.to_string(),
        table: Some(Table {
            table_id: -1,
            ver: -1,
            schema: vec![1, 2, 3],
            options: maplit::hashmap! {"key".into() => "val".into()},
            placement_policy: vec![1, 2, 3],
        }),
    };
    // create db foo
    eng.create_database(cmd_db.clone(), false).unwrap();
    // create table
    eng.create_table(cmd_table.clone(), false).unwrap();
    let r = eng.drop_table(test_db, test_tbl, false);
    assert!(r.is_ok());
    // with flag "IF EXISTS"
    // table not exist
    let r = eng.drop_table(test_db, test_tbl, true);
    assert!(r.is_ok());
    // db not exist
    let r = eng.drop_table("fake_db", test_tbl, true);
    assert!(r.is_ok());
    // without flag "IF EXISTS"
    // table not exist
    let r = eng.drop_table(test_db, test_tbl, false);
    assert!(r.is_err());
    assert_eq!(r.unwrap_err().code(), Code::NotFound);
    // db not exist
    let r = eng.drop_table("fak_db", test_tbl, false);
    assert!(r.is_err());
    assert_eq!(r.unwrap_err().code(), Code::NotFound);
    let r = eng.drop_table("fak_db", "fake_tbl", false);
    assert!(r.is_err());
    assert_eq!(r.unwrap_err().code(), Code::NotFound);
    Ok(())
}
| 26.945455 | 88 | 0.510526 |
f729973ddc62e1ccdab4f51531a644f2214da04b | 104,906 | use crate::pp::Breaks::{Consistent, Inconsistent};
use crate::pp::{self, Breaks};
use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, BinOpToken, CommentKind, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::util::classify;
use rustc_ast::util::comments::{gather_comments, Comment, CommentStyle};
use rustc_ast::util::parser::{self, AssocOp, Fixity};
use rustc_ast::{self as ast, BlockCheckMode, PatKind, RangeEnd, RangeSyntax};
use rustc_ast::{GenericArg, MacArgs, ModKind};
use rustc_ast::{GenericBound, SelfKind, TraitBoundModifier};
use rustc_ast::{InlineAsmOperand, InlineAsmRegOrRegClass};
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_span::edition::Edition;
use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::symbol::{kw, sym, Ident, IdentPrinter, Symbol};
use rustc_span::{BytePos, FileName, Span};
use std::borrow::Cow;
/// What is printed in front of a macro invocation's delimited arguments:
/// either the macro's path or a fixed keyword form.
pub enum MacHeader<'a> {
    Path(&'a ast::Path),
    Keyword(&'static str),
}
/// The kinds of AST nodes that `PpAnn` callbacks can be notified about.
pub enum AnnNode<'a> {
    Ident(&'a Ident),
    Name(&'a Symbol),
    Block(&'a ast::Block),
    Item(&'a ast::Item),
    SubItem(ast::NodeId),
    Expr(&'a ast::Expr),
    Pat(&'a ast::Pat),
    Crate(&'a ast::Crate),
}
/// Hooks invoked just before (`pre`) and just after (`post`) a node is
/// printed; the default implementations do nothing.
pub trait PpAnn {
    fn pre(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
    fn post(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
}
/// A `PpAnn` implementation that adds no annotations at all.
#[derive(Copy, Clone)]
pub struct NoAnn;
impl PpAnn for NoAnn {}
/// The comments of one source file, consumed in order while printing.
pub struct Comments<'a> {
    sm: &'a SourceMap,
    comments: Vec<Comment>,
    // Index of the next comment to print.
    current: usize,
}
impl<'a> Comments<'a> {
    /// Gathers all comments from `input` so they can be interleaved with the
    /// pretty-printed output.
    pub fn new(sm: &'a SourceMap, filename: FileName, input: String) -> Comments<'a> {
        let comments = gather_comments(sm, filename, input);
        Comments { sm, comments, current: 0 }
    }
    /// Peeks at the next not-yet-printed comment, if any.
    pub fn next(&self) -> Option<Comment> {
        self.comments.get(self.current).cloned()
    }
    /// Returns the next comment if it is a trailing comment that sits on the
    /// same line as `span`, between the end of `span` and `next_pos`.
    pub fn trailing_comment(
        &self,
        span: rustc_span::Span,
        next_pos: Option<BytePos>,
    ) -> Option<Comment> {
        if let Some(cmnt) = self.next() {
            if cmnt.style != CommentStyle::Trailing {
                return None;
            }
            let span_line = self.sm.lookup_char_pos(span.hi());
            let comment_line = self.sm.lookup_char_pos(cmnt.pos);
            // With no following position, default the upper bound to just
            // past the comment itself so the range check can still succeed.
            let next = next_pos.unwrap_or_else(|| cmnt.pos + BytePos(1));
            if span.hi() < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line {
                return Some(cmnt);
            }
        }
        None
    }
}
/// Pretty-printer state: the low-level printer plus optional source comments
/// and user-provided annotation hooks.
pub struct State<'a> {
    pub s: pp::Printer,
    comments: Option<Comments<'a>>,
    ann: &'a (dyn PpAnn + 'a),
}
/// Number of spaces per level of indentation.
crate const INDENT_UNIT: usize = 4;
/// Pretty-prints an entire crate to a `String`.
///
/// Requires you to pass an input filename and reader so that
/// it can scan the input text for comments to copy forward.
pub fn print_crate<'a>(
    sm: &'a SourceMap,
    krate: &ast::Crate,
    filename: FileName,
    input: String,
    ann: &'a dyn PpAnn,
    is_expanded: bool,
    edition: Edition,
) -> String {
    let mut s =
        State { s: pp::mk_printer(), comments: Some(Comments::new(sm, filename, input)), ann };
    if is_expanded && !krate.attrs.iter().any(|attr| attr.has_name(sym::no_core)) {
        // We need to print `#![no_std]` (and its feature gate) so that
        // compiling pretty-printed source won't inject libstd again.
        // However, we don't want these attributes in the AST because
        // of the feature gate, so we fake them up here.
        // `#![feature(prelude_import)]`
        let pi_nested = attr::mk_nested_word_item(Ident::with_dummy_span(sym::prelude_import));
        let list = attr::mk_list_item(Ident::with_dummy_span(sym::feature), vec![pi_nested]);
        let fake_attr = attr::mk_attr_inner(list);
        s.print_attribute(&fake_attr);
        // Currently, in Rust 2018 we don't have `extern crate std;` at the crate
        // root, so this is not needed, and actually breaks things.
        if edition == Edition::Edition2015 {
            // `#![no_std]`
            let no_std_meta = attr::mk_word_item(Ident::with_dummy_span(sym::no_std));
            let fake_attr = attr::mk_attr_inner(no_std_meta);
            s.print_attribute(&fake_attr);
        }
    }
    s.print_inner_attributes(&krate.attrs);
    for item in &krate.items {
        s.print_item(item);
    }
    s.print_remaining_comments();
    s.ann.post(&mut s, AnnNode::Crate(krate));
    s.s.eof()
}
/// This makes printed token streams look slightly nicer,
/// and also addresses some specific regressions described in #63896 and #73345.
///
/// Returns whether a space should be printed before `tt`, given the
/// previously printed token tree `prev`.
fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
    if let TokenTree::Token(token) = prev {
        // Never put a space after `.` or `$`.
        if matches!(token.kind, token::Dot | token::Dollar) {
            return false;
        }
        // A line doc comment already ends its line, so only block doc
        // comments need a separating space after them.
        if let token::DocComment(comment_kind, ..) = token.kind {
            return comment_kind != CommentKind::Line;
        }
    }
    match tt {
        // No space before `,`, `!` or `.`.
        TokenTree::Token(token) => !matches!(token.kind, token::Comma | token::Not | token::Dot),
        // No space between a call-like identifier and its parentheses.
        TokenTree::Delimited(_, DelimToken::Paren, _) => {
            !matches!(prev, TokenTree::Token(Token { kind: token::Ident(..), .. }))
        }
        // No space between `#` and the `[` of an attribute.
        TokenTree::Delimited(_, DelimToken::Bracket, _) => {
            !matches!(prev, TokenTree::Token(Token { kind: token::Pound, .. }))
        }
        TokenTree::Delimited(..) => true,
    }
}
/// Returns the source form of a binary-operator token.
fn binop_to_string(op: BinOpToken) -> &'static str {
    match op {
        token::Plus => "+",
        token::Minus => "-",
        token::Star => "*",
        token::Slash => "/",
        token::Percent => "%",
        token::Caret => "^",
        token::And => "&",
        token::Or => "|",
        token::Shl => "<<",
        token::Shr => ">>",
    }
}
/// Reconstructs the source form of a doc comment from its kind (`///` vs
/// `/** */`), style (outer vs inner `//!` / `/*! */`) and contents.
fn doc_comment_to_string(
    comment_kind: CommentKind,
    attr_style: ast::AttrStyle,
    data: Symbol,
) -> String {
    match (comment_kind, attr_style) {
        (CommentKind::Line, ast::AttrStyle::Outer) => format!("///{}", data),
        (CommentKind::Line, ast::AttrStyle::Inner) => format!("//!{}", data),
        (CommentKind::Block, ast::AttrStyle::Outer) => format!("/**{}*/", data),
        (CommentKind::Block, ast::AttrStyle::Inner) => format!("/*!{}*/", data),
    }
}
/// Reconstructs the source form of a literal token, including any type
/// suffix it carries.
pub fn literal_to_string(lit: token::Lit) -> String {
    let token::Lit { kind, symbol, suffix } = lit;
    // Render the literal body according to its token kind.
    let mut text = match kind {
        token::Byte => format!("b'{}'", symbol),
        token::Char => format!("'{}'", symbol),
        token::Str => format!("\"{}\"", symbol),
        token::StrRaw(n) => {
            // Raw strings repeat the same run of `#`s on both sides.
            let hashes = "#".repeat(n as usize);
            format!("r{0}\"{1}\"{0}", hashes, symbol)
        }
        token::ByteStr => format!("b\"{}\"", symbol),
        token::ByteStrRaw(n) => {
            let hashes = "#".repeat(n as usize);
            format!("br{0}\"{1}\"{0}", hashes, symbol)
        }
        token::Integer | token::Float | token::Bool | token::Err => symbol.to_string(),
    };
    // Append the type suffix, if any (e.g. the `u8` of `1u8`).
    if let Some(suffix) = suffix {
        text.push_str(&suffix.as_str())
    }
    text
}
/// Prefixes `s` with the pretty-printed form of `vis`.
fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String {
    format!("{}{}", State::new().to_string(|s| s.print_visibility(vis)), s)
}
// Forward method calls on `State` to the underlying `pp::Printer`.
impl std::ops::Deref for State<'_> {
    type Target = pp::Printer;
    fn deref(&self) -> &Self::Target {
        &self.s
    }
}
impl std::ops::DerefMut for State<'_> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.s
    }
}
/// Shared pretty-printing machinery. The `Deref`/`DerefMut` bounds to
/// `pp::Printer` let default methods call printer primitives (`word`,
/// `space`, box open/close) directly on `self`.
pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::DerefMut {
    /// The stream of source comments to interleave with printed output.
    fn comments(&mut self) -> &mut Option<Comments<'a>>;
    fn print_ident(&mut self, ident: Ident);
    fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool);
    /// Print `elts` separated by `sep`, inside an rbox using break style `b`.
    fn strsep<T, F>(
        &mut self,
        sep: &'static str,
        space_before: bool,
        b: Breaks,
        elts: &[T],
        mut op: F,
    ) where
        F: FnMut(&mut Self, &T),
    {
        self.rbox(0, b);
        if let Some((first, rest)) = elts.split_first() {
            op(self, first);
            for elt in rest {
                if space_before {
                    self.space();
                }
                self.word_space(sep);
                op(self, elt);
            }
        }
        self.end();
    }
    /// Comma-separated convenience wrapper over `strsep`.
    fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], op: F)
    where
        F: FnMut(&mut Self, &T),
    {
        self.strsep(",", false, b, elts, op)
    }
    /// Emit all pending source comments located before position `pos`.
    fn maybe_print_comment(&mut self, pos: BytePos) {
        while let Some(ref cmnt) = self.next_comment() {
            if cmnt.pos < pos {
                self.print_comment(cmnt);
            } else {
                break;
            }
        }
    }
    /// Print a single comment according to its placement style, then
    /// advance the comment cursor.
    fn print_comment(&mut self, cmnt: &Comment) {
        match cmnt.style {
            CommentStyle::Mixed => {
                if !self.is_beginning_of_line() {
                    self.zerobreak();
                }
                if let Some((last, lines)) = cmnt.lines.split_last() {
                    self.ibox(0);
                    for line in lines {
                        self.word(line.clone());
                        self.hardbreak()
                    }
                    self.word(last.clone());
                    self.space();
                    self.end();
                }
                self.zerobreak()
            }
            CommentStyle::Isolated => {
                self.hardbreak_if_not_bol();
                for line in &cmnt.lines {
                    // Don't print empty lines because they will end up as trailing
                    // whitespace.
                    if !line.is_empty() {
                        self.word(line.clone());
                    }
                    self.hardbreak();
                }
            }
            CommentStyle::Trailing => {
                if !self.is_beginning_of_line() {
                    self.word(" ");
                }
                if cmnt.lines.len() == 1 {
                    self.word(cmnt.lines[0].clone());
                    self.hardbreak()
                } else {
                    self.ibox(0);
                    for line in &cmnt.lines {
                        if !line.is_empty() {
                            self.word(line.clone());
                        }
                        self.hardbreak();
                    }
                    self.end();
                }
            }
            CommentStyle::BlankLine => {
                // We need to do at least one, possibly two hardbreaks.
                let twice = match self.last_token() {
                    pp::Token::String(s) => ";" == s,
                    pp::Token::Begin(_) => true,
                    pp::Token::End => true,
                    _ => false,
                };
                if twice {
                    self.hardbreak();
                }
                self.hardbreak();
            }
        }
        if let Some(cmnts) = self.comments() {
            cmnts.current += 1;
        }
    }
    /// Peek the next comment in the stream, if any.
    fn next_comment(&mut self) -> Option<Comment> {
        self.comments().as_mut().and_then(|c| c.next())
    }
    fn print_literal(&mut self, lit: &ast::Lit) {
        self.maybe_print_comment(lit.span.lo());
        self.word(lit.token.to_string())
    }
    /// Print a string literal, escaping for cooked strings and
    /// reconstructing the `#` delimiters for raw strings.
    fn print_string(&mut self, st: &str, style: ast::StrStyle) {
        let st = match style {
            ast::StrStyle::Cooked => (format!("\"{}\"", st.escape_debug())),
            ast::StrStyle::Raw(n) => {
                format!("r{delim}\"{string}\"{delim}", delim = "#".repeat(n as usize), string = st)
            }
        };
        self.word(st)
    }
    fn print_symbol(&mut self, sym: Symbol, style: ast::StrStyle) {
        self.print_string(&sym.as_str(), style);
    }
    fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) {
        self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, true)
    }
    fn print_inner_attributes_no_trailing_hardbreak(&mut self, attrs: &[ast::Attribute]) {
        self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, false)
    }
    fn print_outer_attributes(&mut self, attrs: &[ast::Attribute]) {
        self.print_either_attributes(attrs, ast::AttrStyle::Outer, false, true)
    }
    fn print_inner_attributes_inline(&mut self, attrs: &[ast::Attribute]) {
        self.print_either_attributes(attrs, ast::AttrStyle::Inner, true, true)
    }
    fn print_outer_attributes_inline(&mut self, attrs: &[ast::Attribute]) {
        self.print_either_attributes(attrs, ast::AttrStyle::Outer, true, true)
    }
    /// Print only the attributes whose style matches `kind`, optionally
    /// inline (space-separated) and optionally followed by a hardbreak.
    fn print_either_attributes(
        &mut self,
        attrs: &[ast::Attribute],
        kind: ast::AttrStyle,
        is_inline: bool,
        trailing_hardbreak: bool,
    ) {
        let mut count = 0;
        for attr in attrs {
            if attr.style == kind {
                self.print_attribute_inline(attr, is_inline);
                if is_inline {
                    self.nbsp();
                }
                count += 1;
            }
        }
        if count > 0 && trailing_hardbreak && !is_inline {
            self.hardbreak_if_not_bol();
        }
    }
    fn print_attribute(&mut self, attr: &ast::Attribute) {
        self.print_attribute_inline(attr, false)
    }
    /// Print one attribute: `#[...]`, `#![...]`, or a doc comment.
    fn print_attribute_inline(&mut self, attr: &ast::Attribute, is_inline: bool) {
        if !is_inline {
            self.hardbreak_if_not_bol();
        }
        self.maybe_print_comment(attr.span.lo());
        match attr.kind {
            ast::AttrKind::Normal(ref item, _) => {
                match attr.style {
                    ast::AttrStyle::Inner => self.word("#!["),
                    ast::AttrStyle::Outer => self.word("#["),
                }
                self.print_attr_item(&item, attr.span);
                self.word("]");
            }
            ast::AttrKind::DocComment(comment_kind, data) => {
                self.word(doc_comment_to_string(comment_kind, attr.style, data));
                self.hardbreak()
            }
        }
    }
    /// Print the interior of an attribute: a path with delimited tokens
    /// (`path(...)`), a bare path, or a `path = value` pair.
    fn print_attr_item(&mut self, item: &ast::AttrItem, span: Span) {
        self.ibox(0);
        match &item.args {
            MacArgs::Delimited(_, delim, tokens) => self.print_mac_common(
                Some(MacHeader::Path(&item.path)),
                false,
                None,
                delim.to_token(),
                tokens,
                true,
                span,
            ),
            MacArgs::Empty | MacArgs::Eq(..) => {
                self.print_path(&item.path, false, 0);
                if let MacArgs::Eq(_, token) = &item.args {
                    self.space();
                    self.word_space("=");
                    let token_str = self.token_to_string_ext(token, true);
                    self.word(token_str);
                }
            }
        }
        self.end();
    }
    fn print_meta_list_item(&mut self, item: &ast::NestedMetaItem) {
        match item {
            ast::NestedMetaItem::MetaItem(ref mi) => self.print_meta_item(mi),
            ast::NestedMetaItem::Literal(ref lit) => self.print_literal(lit),
        }
    }
    /// Print a meta item: bare word, `name = value`, or `name(list)`.
    fn print_meta_item(&mut self, item: &ast::MetaItem) {
        self.ibox(INDENT_UNIT);
        match item.kind {
            ast::MetaItemKind::Word => self.print_path(&item.path, false, 0),
            ast::MetaItemKind::NameValue(ref value) => {
                self.print_path(&item.path, false, 0);
                self.space();
                self.word_space("=");
                self.print_literal(value);
            }
            ast::MetaItemKind::List(ref items) => {
                self.print_path(&item.path, false, 0);
                self.popen();
                self.commasep(Consistent, &items[..], |s, i| s.print_meta_list_item(i));
                self.pclose();
            }
        }
        self.end();
    }
    /// This doesn't deserve to be called "pretty" printing, but it should be
    /// meaning-preserving. A quick hack that might help would be to look at the
    /// spans embedded in the TTs to decide where to put spaces and newlines.
    /// But it'd be better to parse these according to the grammar of the
    /// appropriate macro, transcribe back into the grammar we just parsed from,
    /// and then pretty-print the resulting AST nodes (so, e.g., we print
    /// expression arguments as expressions). It can be done! I think.
    fn print_tt(&mut self, tt: &TokenTree, convert_dollar_crate: bool) {
        match tt {
            TokenTree::Token(token) => {
                let token_str = self.token_to_string_ext(&token, convert_dollar_crate);
                self.word(token_str);
                if let token::DocComment(..) = token.kind {
                    self.hardbreak()
                }
            }
            TokenTree::Delimited(dspan, delim, tts) => {
                self.print_mac_common(
                    None,
                    false,
                    None,
                    *delim,
                    tts,
                    convert_dollar_crate,
                    dspan.entire(),
                );
            }
        }
    }
    /// Print a token stream, inserting spaces between trees where needed.
    fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) {
        let mut iter = tts.trees().peekable();
        while let Some(tt) = iter.next() {
            self.print_tt(&tt, convert_dollar_crate);
            if let Some(next) = iter.peek() {
                if tt_prepend_space(next, &tt) {
                    self.space();
                }
            }
        }
    }
    /// Shared printing for macro-like constructs: an optional header
    /// (path or keyword), optional `!`, optional ident, then the delimited
    /// token stream. Brace-delimited bodies get block-style layout.
    fn print_mac_common(
        &mut self,
        header: Option<MacHeader<'_>>,
        has_bang: bool,
        ident: Option<Ident>,
        delim: DelimToken,
        tts: &TokenStream,
        convert_dollar_crate: bool,
        span: Span,
    ) {
        if delim == DelimToken::Brace {
            self.cbox(INDENT_UNIT);
        }
        match header {
            Some(MacHeader::Path(path)) => self.print_path(path, false, 0),
            Some(MacHeader::Keyword(kw)) => self.word(kw),
            None => {}
        }
        if has_bang {
            self.word("!");
        }
        if let Some(ident) = ident {
            self.nbsp();
            self.print_ident(ident);
        }
        match delim {
            DelimToken::Brace => {
                if header.is_some() || has_bang || ident.is_some() {
                    self.nbsp();
                }
                self.word("{");
                if !tts.is_empty() {
                    self.space();
                }
            }
            _ => {
                let token_str = self.token_kind_to_string(&token::OpenDelim(delim));
                self.word(token_str)
            }
        }
        self.ibox(0);
        self.print_tts(tts, convert_dollar_crate);
        self.end();
        match delim {
            DelimToken::Brace => self.bclose(span),
            _ => {
                let token_str = self.token_kind_to_string(&token::CloseDelim(delim));
                self.word(token_str)
            }
        }
    }
    /// Print a macro definition (`macro_rules! name { ... }` or
    /// `macro name { ... }`), printing visibility only for the latter form.
    fn print_mac_def(
        &mut self,
        macro_def: &ast::MacroDef,
        ident: &Ident,
        sp: &Span,
        print_visibility: impl FnOnce(&mut Self),
    ) {
        let (kw, has_bang) = if macro_def.macro_rules {
            ("macro_rules", true)
        } else {
            print_visibility(self);
            ("macro", false)
        };
        self.print_mac_common(
            Some(MacHeader::Keyword(kw)),
            has_bang,
            Some(*ident),
            macro_def.body.delim(),
            // FIX: was mojibake `¯o_def` (corrupted `&macr;` HTML entity);
            // restore the intended borrow of the macro body's tokens.
            &macro_def.body.inner_tokens(),
            true,
            *sp,
        );
        if macro_def.body.need_semicolon() {
            self.word(";");
        }
    }
    /// Print a path, omitting the last `depth` segments.
    fn print_path(&mut self, path: &ast::Path, colons_before_params: bool, depth: usize) {
        self.maybe_print_comment(path.span.lo());
        for (i, segment) in path.segments[..path.segments.len() - depth].iter().enumerate() {
            if i > 0 {
                self.word("::")
            }
            self.print_path_segment(segment, colons_before_params);
        }
    }
    fn print_path_segment(&mut self, segment: &ast::PathSegment, colons_before_params: bool) {
        if segment.ident.name != kw::PathRoot {
            self.print_ident(segment.ident);
            if let Some(ref args) = segment.args {
                self.print_generic_args(args, colons_before_params);
            }
        }
    }
    /// Open the standard two-box layout for item-like constructs; closed
    /// later by `bopen`/`bclose`.
    fn head<S: Into<Cow<'static, str>>>(&mut self, w: S) {
        let w = w.into();
        // Outer-box is consistent.
        self.cbox(INDENT_UNIT);
        // Head-box is inconsistent.
        self.ibox(w.len() + 1);
        // Keyword that starts the head.
        if !w.is_empty() {
            self.word_nbsp(w);
        }
    }
    fn bopen(&mut self) {
        self.word("{");
        self.end(); // Close the head-box.
    }
    fn bclose_maybe_open(&mut self, span: rustc_span::Span, close_box: bool) {
        self.maybe_print_comment(span.hi());
        self.break_offset_if_not_bol(1, -(INDENT_UNIT as isize));
        self.word("}");
        if close_box {
            self.end(); // Close the outer-box.
        }
    }
    fn bclose(&mut self, span: rustc_span::Span) {
        self.bclose_maybe_open(span, true)
    }
    fn break_offset_if_not_bol(&mut self, n: usize, off: isize) {
        if !self.is_beginning_of_line() {
            self.break_offset(n, off)
        } else if off != 0 && self.last_token().is_hardbreak_tok() {
            // We do something pretty sketchy here: tuck the nonzero
            // offset-adjustment we were going to deposit along with the
            // break into the previous hardbreak.
            self.replace_last_token(pp::Printer::hardbreak_tok_offset(off));
        }
    }
    /// Render an interpolated (already-parsed) AST fragment back to text.
    fn nonterminal_to_string(&self, nt: &Nonterminal) -> String {
        match *nt {
            token::NtExpr(ref e) => self.expr_to_string(e),
            token::NtMeta(ref e) => self.attr_item_to_string(e),
            token::NtTy(ref e) => self.ty_to_string(e),
            token::NtPath(ref e) => self.path_to_string(e),
            token::NtItem(ref e) => self.item_to_string(e),
            token::NtBlock(ref e) => self.block_to_string(e),
            token::NtStmt(ref e) => self.stmt_to_string(e),
            token::NtPat(ref e) => self.pat_to_string(e),
            token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(),
            token::NtLifetime(e) => e.to_string(),
            token::NtLiteral(ref e) => self.expr_to_string(e),
            token::NtTT(ref tree) => self.tt_to_string(tree),
            token::NtVis(ref e) => self.vis_to_string(e),
        }
    }
    /// Print the token kind precisely, without converting `$crate` into its respective crate name.
    fn token_kind_to_string(&self, tok: &TokenKind) -> String {
        self.token_kind_to_string_ext(tok, None)
    }
    fn token_kind_to_string_ext(
        &self,
        tok: &TokenKind,
        convert_dollar_crate: Option<Span>,
    ) -> String {
        match *tok {
            token::Eq => "=".to_string(),
            token::Lt => "<".to_string(),
            token::Le => "<=".to_string(),
            token::EqEq => "==".to_string(),
            token::Ne => "!=".to_string(),
            token::Ge => ">=".to_string(),
            token::Gt => ">".to_string(),
            token::Not => "!".to_string(),
            token::Tilde => "~".to_string(),
            token::OrOr => "||".to_string(),
            token::AndAnd => "&&".to_string(),
            token::BinOp(op) => binop_to_string(op).to_string(),
            token::BinOpEq(op) => format!("{}=", binop_to_string(op)),
            /* Structural symbols */
            token::At => "@".to_string(),
            token::Dot => ".".to_string(),
            token::DotDot => "..".to_string(),
            token::DotDotDot => "...".to_string(),
            token::DotDotEq => "..=".to_string(),
            token::Comma => ",".to_string(),
            token::Semi => ";".to_string(),
            token::Colon => ":".to_string(),
            token::ModSep => "::".to_string(),
            token::RArrow => "->".to_string(),
            token::LArrow => "<-".to_string(),
            token::FatArrow => "=>".to_string(),
            token::OpenDelim(token::Paren) => "(".to_string(),
            token::CloseDelim(token::Paren) => ")".to_string(),
            token::OpenDelim(token::Bracket) => "[".to_string(),
            token::CloseDelim(token::Bracket) => "]".to_string(),
            token::OpenDelim(token::Brace) => "{".to_string(),
            token::CloseDelim(token::Brace) => "}".to_string(),
            token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(),
            token::Pound => "#".to_string(),
            token::Dollar => "$".to_string(),
            token::Question => "?".to_string(),
            token::SingleQuote => "'".to_string(),
            /* Literals */
            token::Literal(lit) => literal_to_string(lit),
            /* Name components */
            token::Ident(s, is_raw) => {
                IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string()
            }
            token::Lifetime(s) => s.to_string(),
            /* Other */
            token::DocComment(comment_kind, attr_style, data) => {
                doc_comment_to_string(comment_kind, attr_style, data)
            }
            token::Eof => "<eof>".to_string(),
            token::Interpolated(ref nt) => self.nonterminal_to_string(nt),
        }
    }
    /// Print the token precisely, without converting `$crate` into its respective crate name.
    fn token_to_string(&self, token: &Token) -> String {
        self.token_to_string_ext(token, false)
    }
    fn token_to_string_ext(&self, token: &Token, convert_dollar_crate: bool) -> String {
        let convert_dollar_crate = convert_dollar_crate.then_some(token.span);
        self.token_kind_to_string_ext(&token.kind, convert_dollar_crate)
    }
    fn ty_to_string(&self, ty: &ast::Ty) -> String {
        self.to_string(|s| s.print_type(ty))
    }
    fn bounds_to_string(&self, bounds: &[ast::GenericBound]) -> String {
        self.to_string(|s| s.print_type_bounds("", bounds))
    }
    fn pat_to_string(&self, pat: &ast::Pat) -> String {
        self.to_string(|s| s.print_pat(pat))
    }
    fn expr_to_string(&self, e: &ast::Expr) -> String {
        self.to_string(|s| s.print_expr(e))
    }
    fn tt_to_string(&self, tt: &TokenTree) -> String {
        self.to_string(|s| s.print_tt(tt, false))
    }
    fn tts_to_string(&self, tokens: &TokenStream) -> String {
        self.to_string(|s| s.print_tts(tokens, false))
    }
    fn stmt_to_string(&self, stmt: &ast::Stmt) -> String {
        self.to_string(|s| s.print_stmt(stmt))
    }
    fn item_to_string(&self, i: &ast::Item) -> String {
        self.to_string(|s| s.print_item(i))
    }
    fn generic_params_to_string(&self, generic_params: &[ast::GenericParam]) -> String {
        self.to_string(|s| s.print_generic_params(generic_params))
    }
    fn path_to_string(&self, p: &ast::Path) -> String {
        self.to_string(|s| s.print_path(p, false, 0))
    }
    fn path_segment_to_string(&self, p: &ast::PathSegment) -> String {
        self.to_string(|s| s.print_path_segment(p, false))
    }
    fn vis_to_string(&self, v: &ast::Visibility) -> String {
        self.to_string(|s| s.print_visibility(v))
    }
    fn block_to_string(&self, blk: &ast::Block) -> String {
        self.to_string(|s| {
            // Containing cbox, will be closed by `print_block` at `}`.
            s.cbox(INDENT_UNIT);
            // Head-ibox, will be closed by `print_block` after `{`.
            s.ibox(0);
            s.print_block(blk)
        })
    }
    fn meta_list_item_to_string(&self, li: &ast::NestedMetaItem) -> String {
        self.to_string(|s| s.print_meta_list_item(li))
    }
    fn attr_item_to_string(&self, ai: &ast::AttrItem) -> String {
        self.to_string(|s| s.print_attr_item(ai, ai.path.span))
    }
    fn attribute_to_string(&self, attr: &ast::Attribute) -> String {
        self.to_string(|s| s.print_attribute(attr))
    }
    fn param_to_string(&self, arg: &ast::Param) -> String {
        self.to_string(|s| s.print_param(arg, false))
    }
    /// Run a printing closure on a fresh `State` and collect its output.
    fn to_string(&self, f: impl FnOnce(&mut State<'_>)) -> String {
        let mut printer = State::new();
        f(&mut printer);
        printer.s.eof()
    }
}
impl<'a> PrintState<'a> for State<'a> {
    fn comments(&mut self) -> &mut Option<Comments<'a>> {
        &mut self.comments
    }
    /// Print an identifier (raw-prefixed if needed) and notify the annotator.
    fn print_ident(&mut self, ident: Ident) {
        let rendered = IdentPrinter::for_ast_ident(ident, ident.is_raw_guess()).to_string();
        self.word(rendered);
        self.ann.post(self, AnnNode::Ident(&ident))
    }
    /// Print `<...>` or `(...) -> Ret` style generic arguments, with an
    /// optional leading `::` (turbofish position).
    fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool) {
        if colons_before_params {
            self.word("::")
        }
        match *args {
            ast::GenericArgs::AngleBracketed(ref data) => {
                self.word("<");
                self.commasep(Inconsistent, &data.args, |state, arg| match arg {
                    ast::AngleBracketedArg::Arg(a) => state.print_generic_arg(a),
                    ast::AngleBracketedArg::Constraint(c) => state.print_assoc_constraint(c),
                });
                self.word(">")
            }
            ast::GenericArgs::Parenthesized(ref data) => {
                self.word("(");
                self.commasep(Inconsistent, &data.inputs, |state, ty| state.print_type(ty));
                self.word(")");
                self.print_fn_ret_ty(&data.output);
            }
        }
    }
}
impl<'a> State<'a> {
pub fn new() -> State<'a> {
State { s: pp::mk_printer(), comments: None, ann: &NoAnn }
}
// Synthesizes a comment that was not textually present in the original source
// file.
pub fn synth_comment(&mut self, text: String) {
self.s.word("/*");
self.s.space();
self.s.word(text);
self.s.space();
self.s.word("*/")
}
crate fn commasep_cmnt<T, F, G>(&mut self, b: Breaks, elts: &[T], mut op: F, mut get_span: G)
where
F: FnMut(&mut State<'_>, &T),
G: FnMut(&T) -> rustc_span::Span,
{
self.rbox(0, b);
let len = elts.len();
let mut i = 0;
for elt in elts {
self.maybe_print_comment(get_span(elt).hi());
op(self, elt);
i += 1;
if i < len {
self.s.word(",");
self.maybe_print_trailing_comment(get_span(elt), Some(get_span(&elts[i]).hi()));
self.space_if_not_bol();
}
}
self.end();
}
crate fn commasep_exprs(&mut self, b: Breaks, exprs: &[P<ast::Expr>]) {
self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span)
}
crate fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod, attrs: &[ast::Attribute]) {
self.print_inner_attributes(attrs);
for item in &nmod.items {
self.print_foreign_item(item);
}
}
pub fn print_opt_lifetime(&mut self, lifetime: &Option<ast::Lifetime>) {
if let Some(lt) = *lifetime {
self.print_lifetime(lt);
self.nbsp();
}
}
pub fn print_assoc_constraint(&mut self, constraint: &ast::AssocTyConstraint) {
self.print_ident(constraint.ident);
constraint.gen_args.as_ref().map(|args| self.print_generic_args(args, false));
self.s.space();
match &constraint.kind {
ast::AssocTyConstraintKind::Equality { ty } => {
self.word_space("=");
self.print_type(ty);
}
ast::AssocTyConstraintKind::Bound { bounds } => {
self.print_type_bounds(":", &*bounds);
}
}
}
pub fn print_generic_arg(&mut self, generic_arg: &GenericArg) {
match generic_arg {
GenericArg::Lifetime(lt) => self.print_lifetime(*lt),
GenericArg::Type(ty) => self.print_type(ty),
GenericArg::Const(ct) => self.print_expr(&ct.value),
}
}
    /// Print a type node. Every variant of `ast::TyKind` is handled; the
    /// whole type is wrapped in an inconsistent box so long types can break.
    pub fn print_type(&mut self, ty: &ast::Ty) {
        self.maybe_print_comment(ty.span.lo());
        self.ibox(0);
        match ty.kind {
            // `[T]`
            ast::TyKind::Slice(ref ty) => {
                self.s.word("[");
                self.print_type(ty);
                self.s.word("]");
            }
            // `*const T` / `*mut T` — mutability printed by `print_mt`.
            ast::TyKind::Ptr(ref mt) => {
                self.s.word("*");
                self.print_mt(mt, true);
            }
            // `&'a mut T`
            ast::TyKind::Rptr(ref lifetime, ref mt) => {
                self.s.word("&");
                self.print_opt_lifetime(lifetime);
                self.print_mt(mt, false);
            }
            ast::TyKind::Never => {
                self.s.word("!");
            }
            ast::TyKind::Tup(ref elts) => {
                self.popen();
                self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(ty));
                // A one-element tuple needs the trailing comma: `(T,)`.
                if elts.len() == 1 {
                    self.s.word(",");
                }
                self.pclose();
            }
            ast::TyKind::AnonymousStruct(ref fields, ..) => {
                self.head("struct");
                self.print_record_struct_body(&fields, ty.span);
            }
            ast::TyKind::AnonymousUnion(ref fields, ..) => {
                self.head("union");
                self.print_record_struct_body(&fields, ty.span);
            }
            ast::TyKind::Paren(ref typ) => {
                self.popen();
                self.print_type(typ);
                self.pclose();
            }
            ast::TyKind::BareFn(ref f) => {
                self.print_ty_fn(f.ext, f.unsafety, &f.decl, None, &f.generic_params);
            }
            ast::TyKind::Path(None, ref path) => {
                self.print_path(path, false, 0);
            }
            // Qualified path: `<T as Trait>::Assoc`.
            ast::TyKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, false),
            ast::TyKind::TraitObject(ref bounds, syntax) => {
                let prefix = if syntax == ast::TraitObjectSyntax::Dyn { "dyn" } else { "" };
                self.print_type_bounds(prefix, &bounds[..]);
            }
            ast::TyKind::ImplTrait(_, ref bounds) => {
                self.print_type_bounds("impl", &bounds[..]);
            }
            // `[T; N]`
            ast::TyKind::Array(ref ty, ref length) => {
                self.s.word("[");
                self.print_type(ty);
                self.s.word("; ");
                self.print_expr(&length.value);
                self.s.word("]");
            }
            ast::TyKind::Typeof(ref e) => {
                self.s.word("typeof(");
                self.print_expr(&e.value);
                self.s.word(")");
            }
            ast::TyKind::Infer => {
                self.s.word("_");
            }
            // Error types print as a comment so output stays parseable-ish.
            ast::TyKind::Err => {
                self.popen();
                self.s.word("/*ERROR*/");
                self.pclose();
            }
            ast::TyKind::ImplicitSelf => {
                self.s.word("Self");
            }
            ast::TyKind::MacCall(ref m) => {
                self.print_mac(m);
            }
            // C-variadic `...` in extern fn signatures.
            ast::TyKind::CVarArgs => {
                self.s.word("...");
            }
        }
        self.end();
    }
    /// Print one item inside an `extern { ... }` block, bracketed by
    /// annotator pre/post hooks.
    crate fn print_foreign_item(&mut self, item: &ast::ForeignItem) {
        let ast::Item { id, span, ident, ref attrs, ref kind, ref vis, tokens: _ } = *item;
        self.ann.pre(self, AnnNode::SubItem(id));
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(span.lo());
        self.print_outer_attributes(attrs);
        match kind {
            ast::ForeignItemKind::Fn(box ast::FnKind(def, sig, gen, body)) => {
                self.print_fn_full(sig, ident, gen, vis, *def, body.as_deref(), attrs);
            }
            ast::ForeignItemKind::Static(ty, mutbl, body) => {
                // Foreign statics are never `default`.
                let def = ast::Defaultness::Final;
                self.print_item_const(ident, Some(*mutbl), ty, body.as_deref(), vis, def);
            }
            ast::ForeignItemKind::TyAlias(box ast::TyAliasKind(def, generics, bounds, ty)) => {
                self.print_associated_type(ident, generics, bounds, ty.as_deref(), vis, *def);
            }
            ast::ForeignItemKind::MacCall(m) => {
                self.print_mac(m);
                // Non-brace macro invocations need a trailing semicolon.
                if m.args.need_semicolon() {
                    self.s.word(";");
                }
            }
        }
        self.ann.post(self, AnnNode::SubItem(id))
    }
fn print_item_const(
&mut self,
ident: Ident,
mutbl: Option<ast::Mutability>,
ty: &ast::Ty,
body: Option<&ast::Expr>,
vis: &ast::Visibility,
defaultness: ast::Defaultness,
) {
self.head("");
self.print_visibility(vis);
self.print_defaultness(defaultness);
let leading = match mutbl {
None => "const",
Some(ast::Mutability::Not) => "static",
Some(ast::Mutability::Mut) => "static mut",
};
self.word_space(leading);
self.print_ident(ident);
self.word_space(":");
self.print_type(ty);
self.s.space();
self.end(); // end the head-ibox
if let Some(body) = body {
self.word_space("=");
self.print_expr(body);
}
self.s.word(";");
self.end(); // end the outer cbox
}
fn print_associated_type(
&mut self,
ident: Ident,
generics: &ast::Generics,
bounds: &ast::GenericBounds,
ty: Option<&ast::Ty>,
vis: &ast::Visibility,
defaultness: ast::Defaultness,
) {
self.head("");
self.print_visibility(vis);
self.print_defaultness(defaultness);
self.word_space("type");
self.print_ident(ident);
self.print_generic_params(&generics.params);
self.print_type_bounds(":", bounds);
self.print_where_clause(&generics.where_clause);
if let Some(ty) = ty {
self.s.space();
self.word_space("=");
self.print_type(ty);
}
self.s.word(";");
self.end(); // end inner head-block
self.end(); // end outer head-block
}
    /// Pretty-prints an item.
    crate fn print_item(&mut self, item: &ast::Item) {
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(item.span.lo());
        self.print_outer_attributes(&item.attrs);
        self.ann.pre(self, AnnNode::Item(item));
        match item.kind {
            // `extern crate foo as bar;`
            ast::ItemKind::ExternCrate(orig_name) => {
                self.head(visibility_qualified(&item.vis, "extern crate"));
                if let Some(orig_name) = orig_name {
                    self.print_name(orig_name);
                    self.s.space();
                    self.s.word("as");
                    self.s.space();
                }
                self.print_ident(item.ident);
                self.s.word(";");
                self.end(); // end inner head-block
                self.end(); // end outer head-block
            }
            ast::ItemKind::Use(ref tree) => {
                self.head(visibility_qualified(&item.vis, "use"));
                self.print_use_tree(tree);
                self.s.word(";");
                self.end(); // end inner head-block
                self.end(); // end outer head-block
            }
            ast::ItemKind::Static(ref ty, mutbl, ref body) => {
                // Top-level statics are never `default`.
                let def = ast::Defaultness::Final;
                self.print_item_const(item.ident, Some(mutbl), ty, body.as_deref(), &item.vis, def);
            }
            ast::ItemKind::Const(def, ref ty, ref body) => {
                self.print_item_const(item.ident, None, ty, body.as_deref(), &item.vis, def);
            }
            ast::ItemKind::Fn(box ast::FnKind(def, ref sig, ref gen, ref body)) => {
                let body = body.as_deref();
                self.print_fn_full(sig, item.ident, gen, &item.vis, def, body, &item.attrs);
            }
            ast::ItemKind::Mod(unsafety, ref mod_kind) => {
                self.head(self.to_string(|s| {
                    s.print_visibility(&item.vis);
                    s.print_unsafety(unsafety);
                    s.word("mod");
                }));
                self.print_ident(item.ident);
                match mod_kind {
                    // Inline module with a body: `mod m { ... }`.
                    ModKind::Loaded(items, ..) => {
                        self.nbsp();
                        self.bopen();
                        self.print_inner_attributes(&item.attrs);
                        for item in items {
                            self.print_item(item);
                        }
                        self.bclose(item.span);
                    }
                    // Out-of-line module: `mod m;`.
                    ModKind::Unloaded => {
                        self.s.word(";");
                        self.end(); // end inner head-block
                        self.end(); // end outer head-block
                    }
                }
            }
            ast::ItemKind::ForeignMod(ref nmod) => {
                self.head(self.to_string(|s| {
                    s.print_unsafety(nmod.unsafety);
                    s.word("extern");
                }));
                // Optional ABI string, e.g. `extern "C"`.
                if let Some(abi) = nmod.abi {
                    self.print_literal(&abi.as_lit());
                    self.nbsp();
                }
                self.bopen();
                self.print_foreign_mod(nmod, &item.attrs);
                self.bclose(item.span);
            }
            ast::ItemKind::GlobalAsm(ref asm) => {
                self.head(visibility_qualified(&item.vis, "global_asm!"));
                self.print_inline_asm(asm);
                self.end();
            }
            ast::ItemKind::TyAlias(box ast::TyAliasKind(def, ref generics, ref bounds, ref ty)) => {
                let ty = ty.as_deref();
                self.print_associated_type(item.ident, generics, bounds, ty, &item.vis, def);
            }
            ast::ItemKind::Enum(ref enum_definition, ref params) => {
                self.print_enum_def(enum_definition, params, item.ident, item.span, &item.vis);
            }
            ast::ItemKind::Struct(ref struct_def, ref generics) => {
                self.head(visibility_qualified(&item.vis, "struct"));
                self.print_struct(struct_def, generics, item.ident, item.span, true);
            }
            ast::ItemKind::Union(ref struct_def, ref generics) => {
                self.head(visibility_qualified(&item.vis, "union"));
                self.print_struct(struct_def, generics, item.ident, item.span, true);
            }
            ast::ItemKind::Impl(box ast::ImplKind {
                unsafety,
                polarity,
                defaultness,
                constness,
                ref generics,
                ref of_trait,
                ref self_ty,
                ref items,
            }) => {
                self.head("");
                self.print_visibility(&item.vis);
                self.print_defaultness(defaultness);
                self.print_unsafety(unsafety);
                self.word_nbsp("impl");
                self.print_constness(constness);
                if !generics.params.is_empty() {
                    self.print_generic_params(&generics.params);
                    self.s.space();
                }
                // Negative impl: `impl !Trait for T`.
                if let ast::ImplPolarity::Negative(_) = polarity {
                    self.s.word("!");
                }
                // Trait impls print `Trait for`; inherent impls skip it.
                if let Some(ref t) = *of_trait {
                    self.print_trait_ref(t);
                    self.s.space();
                    self.word_space("for");
                }
                self.print_type(self_ty);
                self.print_where_clause(&generics.where_clause);
                self.s.space();
                self.bopen();
                self.print_inner_attributes(&item.attrs);
                for impl_item in items {
                    self.print_assoc_item(impl_item);
                }
                self.bclose(item.span);
            }
            ast::ItemKind::Trait(box ast::TraitKind(
                is_auto,
                unsafety,
                ref generics,
                ref bounds,
                ref trait_items,
            )) => {
                self.head("");
                self.print_visibility(&item.vis);
                self.print_unsafety(unsafety);
                self.print_is_auto(is_auto);
                self.word_nbsp("trait");
                self.print_ident(item.ident);
                self.print_generic_params(&generics.params);
                // `?Trait` bounds are printed specially; the rest are
                // collected and printed as ordinary supertrait bounds.
                let mut real_bounds = Vec::with_capacity(bounds.len());
                for b in bounds.iter() {
                    if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
                        self.s.space();
                        self.word_space("for ?");
                        self.print_trait_ref(&ptr.trait_ref);
                    } else {
                        real_bounds.push(b.clone());
                    }
                }
                self.print_type_bounds(":", &real_bounds[..]);
                self.print_where_clause(&generics.where_clause);
                self.s.word(" ");
                self.bopen();
                self.print_inner_attributes(&item.attrs);
                for trait_item in trait_items {
                    self.print_assoc_item(trait_item);
                }
                self.bclose(item.span);
            }
            ast::ItemKind::TraitAlias(ref generics, ref bounds) => {
                self.head("");
                self.print_visibility(&item.vis);
                self.word_nbsp("trait");
                self.print_ident(item.ident);
                self.print_generic_params(&generics.params);
                let mut real_bounds = Vec::with_capacity(bounds.len());
                // FIXME(durka) this seems to be some quite outdated syntax
                for b in bounds.iter() {
                    if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
                        self.s.space();
                        self.word_space("for ?");
                        self.print_trait_ref(&ptr.trait_ref);
                    } else {
                        real_bounds.push(b.clone());
                    }
                }
                self.nbsp();
                self.print_type_bounds("=", &real_bounds[..]);
                self.print_where_clause(&generics.where_clause);
                self.s.word(";");
            }
            ast::ItemKind::MacCall(ref mac) => {
                self.print_mac(mac);
                // Non-brace macro invocations need a trailing semicolon.
                if mac.args.need_semicolon() {
                    self.s.word(";");
                }
            }
            ast::ItemKind::MacroDef(ref macro_def) => {
                self.print_mac_def(macro_def, &item.ident, &item.span, |state| {
                    state.print_visibility(&item.vis)
                });
            }
        }
        self.ann.post(self, AnnNode::Item(item))
    }
fn print_trait_ref(&mut self, t: &ast::TraitRef) {
self.print_path(&t.path, false, 0)
}
fn print_formal_generic_params(&mut self, generic_params: &[ast::GenericParam]) {
if !generic_params.is_empty() {
self.s.word("for");
self.print_generic_params(generic_params);
self.nbsp();
}
}
fn print_poly_trait_ref(&mut self, t: &ast::PolyTraitRef) {
self.print_formal_generic_params(&t.bound_generic_params);
self.print_trait_ref(&t.trait_ref)
}
crate fn print_enum_def(
&mut self,
enum_definition: &ast::EnumDef,
generics: &ast::Generics,
ident: Ident,
span: rustc_span::Span,
visibility: &ast::Visibility,
) {
self.head(visibility_qualified(visibility, "enum"));
self.print_ident(ident);
self.print_generic_params(&generics.params);
self.print_where_clause(&generics.where_clause);
self.s.space();
self.print_variants(&enum_definition.variants, span)
}
crate fn print_variants(&mut self, variants: &[ast::Variant], span: rustc_span::Span) {
self.bopen();
for v in variants {
self.space_if_not_bol();
self.maybe_print_comment(v.span.lo());
self.print_outer_attributes(&v.attrs);
self.ibox(INDENT_UNIT);
self.print_variant(v);
self.s.word(",");
self.end();
self.maybe_print_trailing_comment(v.span, None);
}
self.bclose(span)
}
crate fn print_visibility(&mut self, vis: &ast::Visibility) {
match vis.kind {
ast::VisibilityKind::Public => self.word_nbsp("pub"),
ast::VisibilityKind::Crate(sugar) => match sugar {
ast::CrateSugar::PubCrate => self.word_nbsp("pub(crate)"),
ast::CrateSugar::JustCrate => self.word_nbsp("crate"),
},
ast::VisibilityKind::Restricted { ref path, .. } => {
let path = self.to_string(|s| s.print_path(path, false, 0));
if path == "self" || path == "super" {
self.word_nbsp(format!("pub({})", path))
} else {
self.word_nbsp(format!("pub(in {})", path))
}
}
ast::VisibilityKind::Inherited => {}
}
}
crate fn print_defaultness(&mut self, defaultness: ast::Defaultness) {
if let ast::Defaultness::Default(_) = defaultness {
self.word_nbsp("default");
}
}
crate fn print_record_struct_body(&mut self, fields: &[ast::FieldDef], span: rustc_span::Span) {
self.bopen();
self.hardbreak_if_not_bol();
for field in fields {
self.hardbreak_if_not_bol();
self.maybe_print_comment(field.span.lo());
self.print_outer_attributes(&field.attrs);
self.print_visibility(&field.vis);
self.print_ident(field.ident.unwrap());
self.word_nbsp(":");
self.print_type(&field.ty);
self.s.word(",");
}
self.bclose(span)
}
    /// Print a struct/union/variant body after its introducing keyword.
    /// `print_finalizer` controls the trailing `;` for tuple/unit forms
    /// (true for items, false for enum variants).
    crate fn print_struct(
        &mut self,
        struct_def: &ast::VariantData,
        generics: &ast::Generics,
        ident: Ident,
        span: rustc_span::Span,
        print_finalizer: bool,
    ) {
        self.print_ident(ident);
        self.print_generic_params(&generics.params);
        match struct_def {
            ast::VariantData::Tuple(..) | ast::VariantData::Unit(..) => {
                // Only tuple structs print a parenthesized field list;
                // unit structs skip straight to the where-clause.
                if let ast::VariantData::Tuple(..) = struct_def {
                    self.popen();
                    self.commasep(Inconsistent, struct_def.fields(), |s, field| {
                        s.maybe_print_comment(field.span.lo());
                        s.print_outer_attributes(&field.attrs);
                        s.print_visibility(&field.vis);
                        s.print_type(&field.ty)
                    });
                    self.pclose();
                }
                self.print_where_clause(&generics.where_clause);
                if print_finalizer {
                    self.s.word(";");
                }
                // Close the two boxes opened by `head` (no `bclose` here
                // because there is no brace-delimited body).
                self.end();
                self.end(); // Close the outer-box.
            }
            ast::VariantData::Struct(ref fields, ..) => {
                self.print_where_clause(&generics.where_clause);
                self.nbsp();
                self.print_record_struct_body(fields, span);
            }
        }
    }
crate fn print_variant(&mut self, v: &ast::Variant) {
self.head("");
self.print_visibility(&v.vis);
let generics = ast::Generics::default();
self.print_struct(&v.data, &generics, v.ident, v.span, false);
if let Some(ref d) = v.disr_expr {
self.s.space();
self.word_space("=");
self.print_expr(&d.value)
}
}
    /// Print one associated item of an `impl` or `trait` block, bracketed
    /// by annotator pre/post hooks. Mirrors `print_foreign_item`.
    crate fn print_assoc_item(&mut self, item: &ast::AssocItem) {
        let ast::Item { id, span, ident, ref attrs, ref kind, ref vis, tokens: _ } = *item;
        self.ann.pre(self, AnnNode::SubItem(id));
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(span.lo());
        self.print_outer_attributes(attrs);
        match kind {
            ast::AssocItemKind::Fn(box ast::FnKind(def, sig, gen, body)) => {
                self.print_fn_full(sig, ident, gen, vis, *def, body.as_deref(), attrs);
            }
            ast::AssocItemKind::Const(def, ty, body) => {
                self.print_item_const(ident, None, ty, body.as_deref(), vis, *def);
            }
            ast::AssocItemKind::TyAlias(box ast::TyAliasKind(def, generics, bounds, ty)) => {
                self.print_associated_type(ident, generics, bounds, ty.as_deref(), vis, *def);
            }
            ast::AssocItemKind::MacCall(m) => {
                self.print_mac(m);
                // Non-brace macro invocations need a trailing semicolon.
                if m.args.need_semicolon() {
                    self.s.word(";");
                }
            }
        }
        self.ann.post(self, AnnNode::SubItem(id))
    }
    /// Prints a single statement, emitting semicolons as required by the
    /// statement kind (`Semi` always, `Expr` only when the expression needs
    /// one to remain a statement).
    crate fn print_stmt(&mut self, st: &ast::Stmt) {
        self.maybe_print_comment(st.span.lo());
        match st.kind {
            ast::StmtKind::Local(ref loc) => {
                self.print_outer_attributes(&loc.attrs);
                self.space_if_not_bol();
                self.ibox(INDENT_UNIT);
                self.word_nbsp("let");
                self.ibox(INDENT_UNIT);
                self.print_local_decl(loc);
                self.end();
                if let Some(ref init) = loc.init {
                    self.nbsp();
                    self.word_space("=");
                    self.print_expr(init);
                }
                self.s.word(";");
                self.end();
            }
            ast::StmtKind::Item(ref item) => self.print_item(item),
            ast::StmtKind::Expr(ref expr) => {
                self.space_if_not_bol();
                self.print_expr_outer_attr_style(expr, false);
                if classify::expr_requires_semi_to_be_stmt(expr) {
                    self.s.word(";");
                }
            }
            ast::StmtKind::Semi(ref expr) => {
                self.space_if_not_bol();
                self.print_expr_outer_attr_style(expr, false);
                self.s.word(";");
            }
            ast::StmtKind::Empty => {
                self.space_if_not_bol();
                self.s.word(";");
            }
            ast::StmtKind::MacCall(ref mac) => {
                self.space_if_not_bol();
                self.print_outer_attributes(&mac.attrs);
                self.print_mac(&mac.mac);
                if mac.style == ast::MacStmtStyle::Semicolon {
                    self.s.word(";");
                }
            }
        }
        self.maybe_print_trailing_comment(st.span, None)
    }
    /// Prints a block with no attached attributes.
    crate fn print_block(&mut self, blk: &ast::Block) {
        self.print_block_with_attrs(blk, &[])
    }
    /// Prints a block but leaves the enclosing pretty-printing box open;
    /// used by match arms, which close the box themselves.
    crate fn print_block_unclosed_indent(&mut self, blk: &ast::Block) {
        self.print_block_maybe_unclosed(blk, &[], false)
    }
    /// Prints a block together with its inner attributes.
    crate fn print_block_with_attrs(&mut self, blk: &ast::Block, attrs: &[ast::Attribute]) {
        self.print_block_maybe_unclosed(blk, attrs, true)
    }
    /// Core block printer: optional `unsafe`, `{`, inner attributes, the
    /// statements, then `}`. `close_box` controls whether the enclosing
    /// pretty-printing box is closed together with the block.
    crate fn print_block_maybe_unclosed(
        &mut self,
        blk: &ast::Block,
        attrs: &[ast::Attribute],
        close_box: bool,
    ) {
        match blk.rules {
            BlockCheckMode::Unsafe(..) => self.word_space("unsafe"),
            BlockCheckMode::Default => (),
        }
        self.maybe_print_comment(blk.span.lo());
        self.ann.pre(self, AnnNode::Block(blk));
        self.bopen();
        self.print_inner_attributes(attrs);
        for (i, st) in blk.stmts.iter().enumerate() {
            match st.kind {
                // The final expression statement is printed without `;`.
                ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
                    self.maybe_print_comment(st.span.lo());
                    self.space_if_not_bol();
                    self.print_expr_outer_attr_style(expr, false);
                    self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()));
                }
                _ => self.print_stmt(st),
            }
        }
        self.bclose_maybe_open(blk.span, close_box);
        self.ann.post(self, AnnNode::Block(blk))
    }
    /// Print a `let pat = expr` expression.
    ///
    /// The scrutinee is parenthesized when necessary so the output re-parses
    /// as a `let` scrutinee rather than re-associating with the context.
    crate fn print_let(&mut self, pat: &ast::Pat, expr: &ast::Expr) {
        self.s.word("let ");
        self.print_pat(pat);
        self.s.space();
        self.word_space("=");
        let npals = || parser::needs_par_as_let_scrutinee(expr.precedence().order());
        self.print_expr_cond_paren(expr, Self::cond_needs_par(expr) || npals())
    }
    /// Prints the `else` tail of an `if`, if any: recursively another
    /// `else if`, or the final `else { .. }` block.
    fn print_else(&mut self, els: Option<&ast::Expr>) {
        if let Some(_else) = els {
            match _else.kind {
                // Another `else if` block.
                ast::ExprKind::If(ref i, ref then, ref e) => {
                    self.cbox(INDENT_UNIT - 1);
                    self.ibox(0);
                    self.s.word(" else if ");
                    self.print_expr_as_cond(i);
                    self.s.space();
                    self.print_block(then);
                    self.print_else(e.as_deref())
                }
                // Final `else` block.
                ast::ExprKind::Block(ref b, _) => {
                    self.cbox(INDENT_UNIT - 1);
                    self.ibox(0);
                    self.s.word(" else ");
                    self.print_block(b)
                }
                // Constraints would be great here!
                _ => {
                    panic!("print_if saw if with weird alternative");
                }
            }
        }
    }
    /// Prints `if test { blk }` followed by its (optional) else tail.
    crate fn print_if(&mut self, test: &ast::Expr, blk: &ast::Block, elseopt: Option<&ast::Expr>) {
        self.head("if");
        self.print_expr_as_cond(test);
        self.s.space();
        self.print_block(blk);
        self.print_else(elseopt)
    }
    /// Prints a macro invocation `path!(tokens)` with its delimited
    /// token stream.
    crate fn print_mac(&mut self, m: &ast::MacCall) {
        self.print_mac_common(
            Some(MacHeader::Path(&m.path)),
            true,
            None,
            m.args.delim(),
            &m.args.inner_tokens(),
            true,
            m.span(),
        );
    }
    /// Prints a parenthesized, comma-separated argument list.
    fn print_call_post(&mut self, args: &[P<ast::Expr>]) {
        self.popen();
        self.commasep_exprs(Inconsistent, args);
        self.pclose()
    }
    /// Prints `expr`, parenthesizing it when its precedence is below `prec`.
    crate fn print_expr_maybe_paren(&mut self, expr: &ast::Expr, prec: i8) {
        self.print_expr_cond_paren(expr, expr.precedence().order() < prec)
    }
    /// Prints an expr using syntax that's acceptable in a condition position, such as the `cond` in
    /// `if cond { ... }`.
    ///
    /// See [`Self::cond_needs_par`] for the cases that force parentheses.
    crate fn print_expr_as_cond(&mut self, expr: &ast::Expr) {
        self.print_expr_cond_paren(expr, Self::cond_needs_par(expr))
    }
// Does `expr` need parenthesis when printed in a condition position?
//
// These cases need parens due to the parse error observed in #26461: `if return {}`
// parses as the erroneous construct `if (return {})`, not `if (return) {}`.
fn cond_needs_par(expr: &ast::Expr) -> bool {
match expr.kind {
ast::ExprKind::Break(..) | ast::ExprKind::Closure(..) | ast::ExprKind::Ret(..) => true,
_ => parser::contains_exterior_struct_lit(expr),
}
}
    /// Prints `expr` or `(expr)` when `needs_par` holds.
    fn print_expr_cond_paren(&mut self, expr: &ast::Expr, needs_par: bool) {
        if needs_par {
            self.popen();
        }
        self.print_expr(expr);
        if needs_par {
            self.pclose();
        }
    }
    /// Prints an array literal `[a, b, c]`.
    fn print_expr_vec(&mut self, exprs: &[P<ast::Expr>]) {
        self.ibox(INDENT_UNIT);
        self.s.word("[");
        self.commasep_exprs(Inconsistent, exprs);
        self.s.word("]");
        self.end();
    }
    /// Prints an inline `const` block expression.
    fn print_expr_anon_const(&mut self, expr: &ast::AnonConst) {
        self.ibox(INDENT_UNIT);
        // NOTE(review): no separator is emitted between `const` and the
        // following block, producing `const{ .. }` — confirm this is intended.
        self.s.word("const");
        self.print_expr(&expr.value);
        self.end();
    }
    /// Prints a repeat expression `[element; count]`.
    fn print_expr_repeat(&mut self, element: &ast::Expr, count: &ast::AnonConst) {
        self.ibox(INDENT_UNIT);
        self.s.word("[");
        self.print_expr(element);
        self.word_space(";");
        self.print_expr(&count.value);
        self.s.word("]");
        self.end();
    }
    /// Prints a struct literal `path { field: expr, .. }`, including
    /// shorthand fields, a functional-update base (`..base`), and bare `..`.
    fn print_expr_struct(
        &mut self,
        qself: &Option<ast::QSelf>,
        path: &ast::Path,
        fields: &[ast::ExprField],
        rest: &ast::StructRest,
    ) {
        if let Some(qself) = qself {
            self.print_qpath(path, qself, true);
        } else {
            self.print_path(path, true, 0);
        }
        self.s.word("{");
        self.commasep_cmnt(
            Consistent,
            fields,
            |s, field| {
                s.print_outer_attributes(&field.attrs);
                s.ibox(INDENT_UNIT);
                // Shorthand fields (`Foo { x }`) print only the expression.
                if !field.is_shorthand {
                    s.print_ident(field.ident);
                    s.word_space(":");
                }
                s.print_expr(&field.expr);
                s.end();
            },
            |f| f.span,
        );
        match rest {
            ast::StructRest::Base(_) | ast::StructRest::Rest(_) => {
                self.ibox(INDENT_UNIT);
                if !fields.is_empty() {
                    self.s.word(",");
                    self.s.space();
                }
                self.s.word("..");
                if let ast::StructRest::Base(ref expr) = *rest {
                    self.print_expr(expr);
                }
                self.end();
            }
            // No base/rest: end a non-empty field list with a trailing comma.
            ast::StructRest::None if !fields.is_empty() => self.s.word(","),
            _ => {}
        }
        self.s.word("}");
    }
    /// Prints a tuple expression; a one-element tuple keeps its trailing
    /// comma (`(x,)`) so it re-parses as a tuple, not a parenthesized expr.
    fn print_expr_tup(&mut self, exprs: &[P<ast::Expr>]) {
        self.popen();
        self.commasep_exprs(Inconsistent, exprs);
        if exprs.len() == 1 {
            self.s.word(",");
        }
        self.pclose()
    }
    /// Prints a call `func(args)`. A field access in callee position is
    /// force-parenthesized, since `a.b(..)` would re-parse as a method call.
    fn print_expr_call(&mut self, func: &ast::Expr, args: &[P<ast::Expr>]) {
        let prec = match func.kind {
            ast::ExprKind::Field(..) => parser::PREC_FORCE_PAREN,
            _ => parser::PREC_POSTFIX,
        };
        self.print_expr_maybe_paren(func, prec);
        self.print_call_post(args)
    }
    /// Prints a method call `receiver.name::<args>(rest)`; `args[0]` is the
    /// receiver and the remainder are the actual call arguments.
    fn print_expr_method_call(&mut self, segment: &ast::PathSegment, args: &[P<ast::Expr>]) {
        let base_args = &args[1..];
        self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX);
        self.s.word(".");
        self.print_ident(segment.ident);
        if let Some(ref args) = segment.args {
            self.print_generic_args(args, true);
        }
        self.print_call_post(base_args)
    }
    /// Prints `lhs op rhs`, parenthesizing operands according to the
    /// operator's precedence and fixity, with extra parens for the `as`/`let`
    /// ambiguities described below.
    fn print_expr_binary(&mut self, op: ast::BinOp, lhs: &ast::Expr, rhs: &ast::Expr) {
        let assoc_op = AssocOp::from_ast_binop(op.node);
        let prec = assoc_op.precedence() as i8;
        let fixity = assoc_op.fixity();
        // A left-assoc operator binds its own side tightly and requires the
        // other side to be strictly higher precedence (and vice versa).
        let (left_prec, right_prec) = match fixity {
            Fixity::Left => (prec, prec + 1),
            Fixity::Right => (prec + 1, prec),
            Fixity::None => (prec + 1, prec + 1),
        };
        let left_prec = match (&lhs.kind, op.node) {
            // These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is
            // the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead
            // of `(x as i32) < ...`. We need to convince it _not_ to do that.
            (&ast::ExprKind::Cast { .. }, ast::BinOpKind::Lt | ast::BinOpKind::Shl) => {
                parser::PREC_FORCE_PAREN
            }
            // We are given `(let _ = a) OP b`.
            //
            // - When `OP <= LAnd` we should print `let _ = a OP b` to avoid redundant parens
            //   as the parser will interpret this as `(let _ = a) OP b`.
            //
            // - Otherwise, e.g. when we have `(let a = b) < c` in AST,
            //   parens are required since the parser would interpret `let a = b < c` as
            //   `let a = (b < c)`. To achieve this, we force parens.
            (&ast::ExprKind::Let { .. }, _) if !parser::needs_par_as_let_scrutinee(prec) => {
                parser::PREC_FORCE_PAREN
            }
            _ => left_prec,
        };
        self.print_expr_maybe_paren(lhs, left_prec);
        self.s.space();
        self.word_space(op.node.to_string());
        self.print_expr_maybe_paren(rhs, right_prec)
    }
    /// Prints a unary expression (`-x`, `!x`, `*x`).
    fn print_expr_unary(&mut self, op: ast::UnOp, expr: &ast::Expr) {
        self.s.word(ast::UnOp::to_string(op));
        self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
    }
    /// Prints a borrow expression: `&expr`, `&mut expr`, or a raw borrow
    /// `&raw const expr` / `&raw mut expr`.
    fn print_expr_addr_of(
        &mut self,
        kind: ast::BorrowKind,
        mutability: ast::Mutability,
        expr: &ast::Expr,
    ) {
        self.s.word("&");
        match kind {
            ast::BorrowKind::Ref => self.print_mutability(mutability, false),
            ast::BorrowKind::Raw => {
                self.word_nbsp("raw");
                // Raw borrows spell out `const` for the immutable case.
                self.print_mutability(mutability, true);
            }
        }
        self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
    }
    /// Prints an expression, rendering its outer attributes inline.
    pub fn print_expr(&mut self, expr: &ast::Expr) {
        self.print_expr_outer_attr_style(expr, true)
    }
    /// Prints an expression preceded by its outer attributes; `is_inline`
    /// selects whether the attributes are printed inline or on their own
    /// lines. This is the main expression dispatcher — one arm per
    /// `ast::ExprKind` variant.
    fn print_expr_outer_attr_style(&mut self, expr: &ast::Expr, is_inline: bool) {
        self.maybe_print_comment(expr.span.lo());
        let attrs = &expr.attrs;
        if is_inline {
            self.print_outer_attributes_inline(attrs);
        } else {
            self.print_outer_attributes(attrs);
        }
        self.ibox(INDENT_UNIT);
        self.ann.pre(self, AnnNode::Expr(expr));
        match expr.kind {
            ast::ExprKind::Box(ref expr) => {
                self.word_space("box");
                self.print_expr_maybe_paren(expr, parser::PREC_PREFIX);
            }
            ast::ExprKind::Array(ref exprs) => {
                self.print_expr_vec(exprs);
            }
            ast::ExprKind::ConstBlock(ref anon_const) => {
                self.print_expr_anon_const(anon_const);
            }
            ast::ExprKind::Repeat(ref element, ref count) => {
                self.print_expr_repeat(element, count);
            }
            ast::ExprKind::Struct(ref se) => {
                self.print_expr_struct(&se.qself, &se.path, &se.fields, &se.rest);
            }
            ast::ExprKind::Tup(ref exprs) => {
                self.print_expr_tup(exprs);
            }
            ast::ExprKind::Call(ref func, ref args) => {
                self.print_expr_call(func, &args[..]);
            }
            ast::ExprKind::MethodCall(ref segment, ref args, _) => {
                self.print_expr_method_call(segment, &args[..]);
            }
            ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
                self.print_expr_binary(op, lhs, rhs);
            }
            ast::ExprKind::Unary(op, ref expr) => {
                self.print_expr_unary(op, expr);
            }
            ast::ExprKind::AddrOf(k, m, ref expr) => {
                self.print_expr_addr_of(k, m, expr);
            }
            ast::ExprKind::Lit(ref lit) => {
                self.print_literal(lit);
            }
            ast::ExprKind::Cast(ref expr, ref ty) => {
                let prec = AssocOp::As.precedence() as i8;
                self.print_expr_maybe_paren(expr, prec);
                self.s.space();
                self.word_space("as");
                self.print_type(ty);
            }
            ast::ExprKind::Type(ref expr, ref ty) => {
                let prec = AssocOp::Colon.precedence() as i8;
                self.print_expr_maybe_paren(expr, prec);
                self.word_space(":");
                self.print_type(ty);
            }
            ast::ExprKind::Let(ref pat, ref scrutinee, _) => {
                self.print_let(pat, scrutinee);
            }
            ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
                self.print_if(test, blk, elseopt.as_deref())
            }
            ast::ExprKind::While(ref test, ref blk, opt_label) => {
                if let Some(label) = opt_label {
                    self.print_ident(label.ident);
                    self.word_space(":");
                }
                self.head("while");
                self.print_expr_as_cond(test);
                self.s.space();
                self.print_block_with_attrs(blk, attrs);
            }
            ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => {
                if let Some(label) = opt_label {
                    self.print_ident(label.ident);
                    self.word_space(":");
                }
                self.head("for");
                self.print_pat(pat);
                self.s.space();
                self.word_space("in");
                self.print_expr_as_cond(iter);
                self.s.space();
                self.print_block_with_attrs(blk, attrs);
            }
            ast::ExprKind::Loop(ref blk, opt_label) => {
                if let Some(label) = opt_label {
                    self.print_ident(label.ident);
                    self.word_space(":");
                }
                self.head("loop");
                self.print_block_with_attrs(blk, attrs);
            }
            ast::ExprKind::Match(ref expr, ref arms) => {
                self.cbox(INDENT_UNIT);
                self.ibox(INDENT_UNIT);
                self.word_nbsp("match");
                self.print_expr_as_cond(expr);
                self.s.space();
                self.bopen();
                self.print_inner_attributes_no_trailing_hardbreak(attrs);
                for arm in arms {
                    self.print_arm(arm);
                }
                self.bclose(expr.span);
            }
            ast::ExprKind::Closure(
                capture_clause,
                asyncness,
                movability,
                ref decl,
                ref body,
                _,
            ) => {
                self.print_movability(movability);
                self.print_asyncness(asyncness);
                self.print_capture_clause(capture_clause);
                self.print_fn_params_and_ret(decl, true);
                self.s.space();
                self.print_expr(body);
                self.end(); // need to close a box
                // a box will be closed by print_expr, but we didn't want an overall
                // wrapper so we closed the corresponding opening. so create an
                // empty box to satisfy the close.
                self.ibox(0);
            }
            ast::ExprKind::Block(ref blk, opt_label) => {
                if let Some(label) = opt_label {
                    self.print_ident(label.ident);
                    self.word_space(":");
                }
                // containing cbox, will be closed by print-block at }
                self.cbox(INDENT_UNIT);
                // head-box, will be closed by print-block after {
                self.ibox(0);
                self.print_block_with_attrs(blk, attrs);
            }
            ast::ExprKind::Async(capture_clause, _, ref blk) => {
                self.word_nbsp("async");
                self.print_capture_clause(capture_clause);
                self.s.space();
                // cbox/ibox in analogy to the `ExprKind::Block` arm above
                self.cbox(INDENT_UNIT);
                self.ibox(0);
                self.print_block_with_attrs(blk, attrs);
            }
            ast::ExprKind::Await(ref expr) => {
                self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
                self.s.word(".await");
            }
            ast::ExprKind::Assign(ref lhs, ref rhs, _) => {
                // Assignment is right-associative: `prec + 1` on the LHS.
                let prec = AssocOp::Assign.precedence() as i8;
                self.print_expr_maybe_paren(lhs, prec + 1);
                self.s.space();
                self.word_space("=");
                self.print_expr_maybe_paren(rhs, prec);
            }
            ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
                let prec = AssocOp::Assign.precedence() as i8;
                self.print_expr_maybe_paren(lhs, prec + 1);
                self.s.space();
                self.s.word(op.node.to_string());
                self.word_space("=");
                self.print_expr_maybe_paren(rhs, prec);
            }
            ast::ExprKind::Field(ref expr, ident) => {
                self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
                self.s.word(".");
                self.print_ident(ident);
            }
            ast::ExprKind::Index(ref expr, ref index) => {
                self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
                self.s.word("[");
                self.print_expr(index);
                self.s.word("]");
            }
            ast::ExprKind::Range(ref start, ref end, limits) => {
                // Special case for `Range`. `AssocOp` claims that `Range` has higher precedence
                // than `Assign`, but `x .. x = x` gives a parse error instead of `x .. (x = x)`.
                // Here we use a fake precedence value so that any child with lower precedence than
                // a "normal" binop gets parenthesized. (`LOr` is the lowest-precedence binop.)
                let fake_prec = AssocOp::LOr.precedence() as i8;
                if let Some(ref e) = *start {
                    self.print_expr_maybe_paren(e, fake_prec);
                }
                if limits == ast::RangeLimits::HalfOpen {
                    self.s.word("..");
                } else {
                    self.s.word("..=");
                }
                if let Some(ref e) = *end {
                    self.print_expr_maybe_paren(e, fake_prec);
                }
            }
            ast::ExprKind::Underscore => self.s.word("_"),
            ast::ExprKind::Path(None, ref path) => self.print_path(path, true, 0),
            ast::ExprKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, true),
            ast::ExprKind::Break(opt_label, ref opt_expr) => {
                self.s.word("break");
                self.s.space();
                if let Some(label) = opt_label {
                    self.print_ident(label.ident);
                    self.s.space();
                }
                if let Some(ref expr) = *opt_expr {
                    self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
                    self.s.space();
                }
            }
            ast::ExprKind::Continue(opt_label) => {
                self.s.word("continue");
                self.s.space();
                if let Some(label) = opt_label {
                    self.print_ident(label.ident);
                    self.s.space()
                }
            }
            ast::ExprKind::Ret(ref result) => {
                self.s.word("return");
                if let Some(ref expr) = *result {
                    self.s.word(" ");
                    self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
                }
            }
            ast::ExprKind::InlineAsm(ref a) => {
                self.word("asm!");
                self.print_inline_asm(a);
            }
            ast::ExprKind::LlvmInlineAsm(ref a) => {
                self.s.word("llvm_asm!");
                self.popen();
                self.print_symbol(a.asm, a.asm_str_style);
                self.word_space(":");
                self.commasep(Inconsistent, &a.outputs, |s, out| {
                    let constraint = out.constraint.as_str();
                    let mut ch = constraint.chars();
                    match ch.next() {
                        // A read-write output rewrites `=` to `+`.
                        Some('=') if out.is_rw => {
                            s.print_string(&format!("+{}", ch.as_str()), ast::StrStyle::Cooked)
                        }
                        _ => s.print_string(&constraint, ast::StrStyle::Cooked),
                    }
                    s.popen();
                    s.print_expr(&out.expr);
                    s.pclose();
                });
                self.s.space();
                self.word_space(":");
                self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
                    s.print_symbol(co, ast::StrStyle::Cooked);
                    s.popen();
                    s.print_expr(o);
                    s.pclose();
                });
                self.s.space();
                self.word_space(":");
                self.commasep(Inconsistent, &a.clobbers, |s, &co| {
                    s.print_symbol(co, ast::StrStyle::Cooked);
                });
                let mut options = vec![];
                if a.volatile {
                    options.push("volatile");
                }
                if a.alignstack {
                    options.push("alignstack");
                }
                if a.dialect == ast::LlvmAsmDialect::Intel {
                    options.push("intel");
                }
                if !options.is_empty() {
                    self.s.space();
                    self.word_space(":");
                    self.commasep(Inconsistent, &options, |s, &co| {
                        s.print_string(co, ast::StrStyle::Cooked);
                    });
                }
                self.pclose();
            }
            ast::ExprKind::MacCall(ref m) => self.print_mac(m),
            ast::ExprKind::Paren(ref e) => {
                self.popen();
                self.print_expr(e);
                self.pclose();
            }
            ast::ExprKind::Yield(ref e) => {
                self.s.word("yield");
                if let Some(ref expr) = *e {
                    self.s.space();
                    self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
                }
            }
            ast::ExprKind::Try(ref e) => {
                self.print_expr_maybe_paren(e, parser::PREC_POSTFIX);
                self.s.word("?")
            }
            ast::ExprKind::TryBlock(ref blk) => {
                self.head("try");
                self.s.space();
                self.print_block_with_attrs(blk, attrs)
            }
            ast::ExprKind::Err => {
                self.popen();
                self.s.word("/*ERROR*/");
                self.pclose()
            }
        }
        self.ann.post(self, AnnNode::Expr(expr));
        self.end();
    }
    /// Prints the argument list of an `asm!` invocation: the template
    /// string(s), then each operand, an optional `clobber_abi(..)`, and a
    /// trailing `options(..)` when any option flags are set.
    fn print_inline_asm(&mut self, asm: &ast::InlineAsm) {
        // Uniform representation of the comma-separated `asm!` arguments.
        enum AsmArg<'a> {
            Template(String),
            Operand(&'a InlineAsmOperand),
            ClobberAbi(Symbol),
            Options(InlineAsmOptions),
        }
        let mut args = vec![AsmArg::Template(InlineAsmTemplatePiece::to_string(&asm.template))];
        args.extend(asm.operands.iter().map(|(o, _)| AsmArg::Operand(o)));
        if let Some((abi, _)) = asm.clobber_abi {
            args.push(AsmArg::ClobberAbi(abi));
        }
        if !asm.options.is_empty() {
            args.push(AsmArg::Options(asm.options));
        }
        self.popen();
        self.commasep(Consistent, &args, |s, arg| match arg {
            AsmArg::Template(template) => s.print_string(&template, ast::StrStyle::Cooked),
            AsmArg::Operand(op) => {
                let print_reg_or_class = |s: &mut Self, r: &InlineAsmRegOrRegClass| match r {
                    InlineAsmRegOrRegClass::Reg(r) => s.print_symbol(*r, ast::StrStyle::Cooked),
                    InlineAsmRegOrRegClass::RegClass(r) => s.word(r.to_string()),
                };
                match op {
                    InlineAsmOperand::In { reg, expr } => {
                        s.word("in");
                        s.popen();
                        print_reg_or_class(s, reg);
                        s.pclose();
                        s.space();
                        s.print_expr(expr);
                    }
                    InlineAsmOperand::Out { reg, late, expr } => {
                        s.word(if *late { "lateout" } else { "out" });
                        s.popen();
                        print_reg_or_class(s, reg);
                        s.pclose();
                        s.space();
                        // `_` marks a discarded output.
                        match expr {
                            Some(expr) => s.print_expr(expr),
                            None => s.word("_"),
                        }
                    }
                    InlineAsmOperand::InOut { reg, late, expr } => {
                        s.word(if *late { "inlateout" } else { "inout" });
                        s.popen();
                        print_reg_or_class(s, reg);
                        s.pclose();
                        s.space();
                        s.print_expr(expr);
                    }
                    InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
                        s.word(if *late { "inlateout" } else { "inout" });
                        s.popen();
                        print_reg_or_class(s, reg);
                        s.pclose();
                        s.space();
                        s.print_expr(in_expr);
                        s.space();
                        s.word_space("=>");
                        match out_expr {
                            Some(out_expr) => s.print_expr(out_expr),
                            None => s.word("_"),
                        }
                    }
                    InlineAsmOperand::Const { anon_const } => {
                        s.word("const");
                        s.space();
                        s.print_expr(&anon_const.value);
                    }
                    InlineAsmOperand::Sym { expr } => {
                        s.word("sym");
                        s.space();
                        s.print_expr(expr);
                    }
                }
            }
            AsmArg::ClobberAbi(abi) => {
                s.word("clobber_abi");
                s.popen();
                s.print_symbol(*abi, ast::StrStyle::Cooked);
                s.pclose();
            }
            AsmArg::Options(opts) => {
                s.word("options");
                s.popen();
                let mut options = vec![];
                if opts.contains(InlineAsmOptions::PURE) {
                    options.push("pure");
                }
                if opts.contains(InlineAsmOptions::NOMEM) {
                    options.push("nomem");
                }
                if opts.contains(InlineAsmOptions::READONLY) {
                    options.push("readonly");
                }
                if opts.contains(InlineAsmOptions::PRESERVES_FLAGS) {
                    options.push("preserves_flags");
                }
                if opts.contains(InlineAsmOptions::NORETURN) {
                    options.push("noreturn");
                }
                if opts.contains(InlineAsmOptions::NOSTACK) {
                    options.push("nostack");
                }
                if opts.contains(InlineAsmOptions::ATT_SYNTAX) {
                    options.push("att_syntax");
                }
                if opts.contains(InlineAsmOptions::RAW) {
                    options.push("raw");
                }
                s.commasep(Inconsistent, &options, |s, &opt| {
                    s.word(opt);
                });
                s.pclose();
            }
        });
        self.pclose();
    }
crate fn print_local_decl(&mut self, loc: &ast::Local) {
self.print_pat(&loc.pat);
if let Some(ref ty) = loc.ty {
self.word_space(":");
self.print_type(ty);
}
}
    /// Prints a bare name (e.g. a lifetime's symbol) and notifies the
    /// annotation hook afterwards.
    crate fn print_name(&mut self, name: Symbol) {
        self.s.word(name.to_string());
        self.ann.post(self, AnnNode::Name(&name))
    }
    /// Prints a qualified path `<Ty as Trait>::segments`; `qself.position`
    /// is the number of leading path segments that belong inside the
    /// angle brackets.
    fn print_qpath(&mut self, path: &ast::Path, qself: &ast::QSelf, colons_before_params: bool) {
        self.s.word("<");
        self.print_type(&qself.ty);
        if qself.position > 0 {
            self.s.space();
            self.word_space("as");
            let depth = path.segments.len() - qself.position;
            self.print_path(path, false, depth);
        }
        self.s.word(">");
        for item_segment in &path.segments[qself.position..] {
            self.s.word("::");
            self.print_ident(item_segment.ident);
            if let Some(ref args) = item_segment.args {
                self.print_generic_args(args, colons_before_params)
            }
        }
    }
    /// Prints a pattern — one arm per `PatKind` variant — bracketed by the
    /// annotation hooks.
    crate fn print_pat(&mut self, pat: &ast::Pat) {
        self.maybe_print_comment(pat.span.lo());
        self.ann.pre(self, AnnNode::Pat(pat));
        /* Pat isn't normalized, but the beauty of it
        is that it doesn't matter */
        match pat.kind {
            PatKind::Wild => self.s.word("_"),
            PatKind::Ident(binding_mode, ident, ref sub) => {
                match binding_mode {
                    ast::BindingMode::ByRef(mutbl) => {
                        self.word_nbsp("ref");
                        self.print_mutability(mutbl, false);
                    }
                    ast::BindingMode::ByValue(ast::Mutability::Not) => {}
                    ast::BindingMode::ByValue(ast::Mutability::Mut) => {
                        self.word_nbsp("mut");
                    }
                }
                self.print_ident(ident);
                // Sub-pattern binding: `ident @ pat`.
                if let Some(ref p) = *sub {
                    self.s.space();
                    self.s.word_space("@");
                    self.print_pat(p);
                }
            }
            PatKind::TupleStruct(ref qself, ref path, ref elts) => {
                if let Some(qself) = qself {
                    self.print_qpath(path, qself, true);
                } else {
                    self.print_path(path, true, 0);
                }
                self.popen();
                self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p));
                self.pclose();
            }
            PatKind::Or(ref pats) => {
                self.strsep("|", true, Inconsistent, &pats[..], |s, p| s.print_pat(p));
            }
            PatKind::Path(None, ref path) => {
                self.print_path(path, true, 0);
            }
            PatKind::Path(Some(ref qself), ref path) => {
                self.print_qpath(path, qself, false);
            }
            PatKind::Struct(ref qself, ref path, ref fields, etc) => {
                if let Some(qself) = qself {
                    self.print_qpath(path, qself, true);
                } else {
                    self.print_path(path, true, 0);
                }
                self.nbsp();
                self.word_space("{");
                self.commasep_cmnt(
                    Consistent,
                    &fields[..],
                    |s, f| {
                        s.cbox(INDENT_UNIT);
                        if !f.is_shorthand {
                            s.print_ident(f.ident);
                            s.word_nbsp(":");
                        }
                        s.print_pat(&f.pat);
                        s.end();
                    },
                    |f| f.pat.span,
                );
                // `etc` marks a trailing `..` in the struct pattern.
                if etc {
                    if !fields.is_empty() {
                        self.word_space(",");
                    }
                    self.s.word("..");
                }
                self.s.space();
                self.s.word("}");
            }
            PatKind::Tuple(ref elts) => {
                self.popen();
                self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p));
                // Keep the trailing comma so `(x,)` stays a tuple pattern.
                if elts.len() == 1 {
                    self.s.word(",");
                }
                self.pclose();
            }
            PatKind::Box(ref inner) => {
                self.s.word("box ");
                self.print_pat(inner);
            }
            PatKind::Ref(ref inner, mutbl) => {
                self.s.word("&");
                if mutbl == ast::Mutability::Mut {
                    self.s.word("mut ");
                }
                // `&(mut x)` must keep its parens: `&mut x` re-parses as a
                // mutable reference pattern, not a mut binding.
                if let PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Mut), ..) =
                    inner.kind
                {
                    self.popen();
                    self.print_pat(inner);
                    self.pclose();
                } else {
                    self.print_pat(inner);
                }
            }
            PatKind::Lit(ref e) => self.print_expr(&**e),
            PatKind::Range(ref begin, ref end, Spanned { node: ref end_kind, .. }) => {
                if let Some(e) = begin {
                    self.print_expr(e);
                    self.s.space();
                }
                match *end_kind {
                    RangeEnd::Included(RangeSyntax::DotDotDot) => self.s.word("..."),
                    RangeEnd::Included(RangeSyntax::DotDotEq) => self.s.word("..="),
                    RangeEnd::Excluded => self.s.word(".."),
                }
                if let Some(e) = end {
                    self.print_expr(e);
                }
            }
            PatKind::Slice(ref elts) => {
                self.s.word("[");
                self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p));
                self.s.word("]");
            }
            PatKind::Rest => self.s.word(".."),
            PatKind::Paren(ref inner) => {
                self.popen();
                self.print_pat(inner);
                self.pclose();
            }
            PatKind::MacCall(ref m) => self.print_mac(m),
        }
        self.ann.post(self, AnnNode::Pat(pat))
    }
    /// Prints one match arm: attributes, pattern, optional guard, `=>`,
    /// and the body (block bodies omit the trailing comma unless `unsafe`).
    fn print_arm(&mut self, arm: &ast::Arm) {
        // Note, I have no idea why this check is necessary, but here it is.
        if arm.attrs.is_empty() {
            self.s.space();
        }
        self.cbox(INDENT_UNIT);
        self.ibox(0);
        self.maybe_print_comment(arm.pat.span.lo());
        self.print_outer_attributes(&arm.attrs);
        self.print_pat(&arm.pat);
        self.s.space();
        if let Some(ref e) = arm.guard {
            self.word_space("if");
            self.print_expr(e);
            self.s.space();
        }
        self.word_space("=>");
        match arm.body.kind {
            ast::ExprKind::Block(ref blk, opt_label) => {
                if let Some(label) = opt_label {
                    self.print_ident(label.ident);
                    self.word_space(":");
                }
                // The block will close the pattern's ibox.
                self.print_block_unclosed_indent(blk);
                // If it is a user-provided unsafe block, print a comma after it.
                if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules {
                    self.s.word(",");
                }
            }
            _ => {
                self.end(); // Close the ibox for the pattern.
                self.print_expr(&arm.body);
                self.s.word(",");
            }
        }
        self.end(); // Close enclosing cbox.
    }
    /// Prints a method receiver: `self`, `&'a mut self`, or `self: Ty`.
    fn print_explicit_self(&mut self, explicit_self: &ast::ExplicitSelf) {
        match explicit_self.node {
            SelfKind::Value(m) => {
                self.print_mutability(m, false);
                self.s.word("self")
            }
            SelfKind::Region(ref lt, m) => {
                self.s.word("&");
                self.print_opt_lifetime(lt);
                self.print_mutability(m, false);
                self.s.word("self")
            }
            SelfKind::Explicit(ref typ, m) => {
                self.print_mutability(m, false);
                self.s.word("self");
                self.word_space(":");
                self.print_type(typ)
            }
        }
    }
    /// Prints a complete function item: visibility, defaultness, signature,
    /// and either the body block or a `;` for a bodyless declaration.
    fn print_fn_full(
        &mut self,
        sig: &ast::FnSig,
        name: Ident,
        generics: &ast::Generics,
        vis: &ast::Visibility,
        defaultness: ast::Defaultness,
        body: Option<&ast::Block>,
        attrs: &[ast::Attribute],
    ) {
        if body.is_some() {
            self.head("");
        }
        self.print_visibility(vis);
        self.print_defaultness(defaultness);
        self.print_fn(&sig.decl, sig.header, Some(name), generics);
        if let Some(body) = body {
            self.nbsp();
            self.print_block_with_attrs(body, attrs);
        } else {
            self.s.word(";");
        }
    }
    /// Prints a function signature: header qualifiers, optional name,
    /// generic params, parameter list with return type, and where-clause.
    crate fn print_fn(
        &mut self,
        decl: &ast::FnDecl,
        header: ast::FnHeader,
        name: Option<Ident>,
        generics: &ast::Generics,
    ) {
        self.print_fn_header_info(header);
        if let Some(name) = name {
            self.nbsp();
            self.print_ident(name);
        }
        self.print_generic_params(&generics.params);
        self.print_fn_params_and_ret(decl, false);
        self.print_where_clause(&generics.where_clause)
    }
crate fn print_fn_params_and_ret(&mut self, decl: &ast::FnDecl, is_closure: bool) {
let (open, close) = if is_closure { ("|", "|") } else { ("(", ")") };
self.word(open);
self.commasep(Inconsistent, &decl.inputs, |s, param| s.print_param(param, is_closure));
self.word(close);
self.print_fn_ret_ty(&decl.output)
}
crate fn print_movability(&mut self, movability: ast::Movability) {
match movability {
ast::Movability::Static => self.word_space("static"),
ast::Movability::Movable => {}
}
}
    /// Emits `async ` when the function or closure is async.
    crate fn print_asyncness(&mut self, asyncness: ast::Async) {
        if asyncness.is_async() {
            self.word_nbsp("async");
        }
    }
crate fn print_capture_clause(&mut self, capture_clause: ast::CaptureBy) {
match capture_clause {
ast::CaptureBy::Value => self.word_space("move"),
ast::CaptureBy::Ref => {}
}
}
    /// Prints a `+`-separated bound list preceded by `prefix` (e.g. `:`);
    /// prints nothing at all when `bounds` is empty.
    pub fn print_type_bounds(&mut self, prefix: &'static str, bounds: &[ast::GenericBound]) {
        if !bounds.is_empty() {
            self.s.word(prefix);
            let mut first = true;
            for bound in bounds {
                // No space between an empty prefix and the first bound.
                if !(first && prefix.is_empty()) {
                    self.nbsp();
                }
                if first {
                    first = false;
                } else {
                    self.word_space("+");
                }
                match bound {
                    GenericBound::Trait(tref, modifier) => {
                        // `?Trait` relaxed bound.
                        if modifier == &TraitBoundModifier::Maybe {
                            self.s.word("?");
                        }
                        self.print_poly_trait_ref(tref);
                    }
                    GenericBound::Outlives(lt) => self.print_lifetime(*lt),
                }
            }
        }
    }
    /// Prints a lifetime (e.g. `'a`).
    crate fn print_lifetime(&mut self, lifetime: ast::Lifetime) {
        self.print_name(lifetime.ident.name)
    }
    /// Prints a lifetime and its outlives bounds, e.g. `'a: 'b + 'c`.
    /// Panics if any bound is not an outlives bound — only lifetimes may
    /// bound a lifetime.
    crate fn print_lifetime_bounds(
        &mut self,
        lifetime: ast::Lifetime,
        bounds: &ast::GenericBounds,
    ) {
        self.print_lifetime(lifetime);
        if !bounds.is_empty() {
            self.s.word(": ");
            for (i, bound) in bounds.iter().enumerate() {
                if i != 0 {
                    self.s.word(" + ");
                }
                match bound {
                    ast::GenericBound::Outlives(lt) => self.print_lifetime(*lt),
                    _ => panic!(),
                }
            }
        }
    }
    /// Prints an angle-bracketed generic parameter list `<...>` covering
    /// lifetime, type, and const parameters (with bounds and defaults);
    /// prints nothing when the list is empty.
    crate fn print_generic_params(&mut self, generic_params: &[ast::GenericParam]) {
        if generic_params.is_empty() {
            return;
        }
        self.s.word("<");
        self.commasep(Inconsistent, &generic_params, |s, param| {
            s.print_outer_attributes_inline(&param.attrs);
            match param.kind {
                ast::GenericParamKind::Lifetime => {
                    let lt = ast::Lifetime { id: param.id, ident: param.ident };
                    s.print_lifetime_bounds(lt, &param.bounds)
                }
                ast::GenericParamKind::Type { ref default } => {
                    s.print_ident(param.ident);
                    s.print_type_bounds(":", &param.bounds);
                    // Type parameter default: `T = Ty`.
                    if let Some(ref default) = default {
                        s.s.space();
                        s.word_space("=");
                        s.print_type(default)
                    }
                }
                ast::GenericParamKind::Const { ref ty, kw_span: _, ref default } => {
                    s.word_space("const");
                    s.print_ident(param.ident);
                    s.s.space();
                    s.word_space(":");
                    s.print_type(ty);
                    s.print_type_bounds(":", &param.bounds);
                    // Const parameter default: `const N: usize = expr`.
                    if let Some(ref default) = default {
                        s.s.space();
                        s.word_space("=");
                        s.print_expr(&default.value);
                    }
                }
            }
        });
        self.s.word(">");
    }
    /// Prints a `where` clause and its comma-separated predicates; also
    /// prints a bare `where` when the source had the token but no predicates.
    crate fn print_where_clause(&mut self, where_clause: &ast::WhereClause) {
        if where_clause.predicates.is_empty() && !where_clause.has_where_token {
            return;
        }
        self.s.space();
        self.word_space("where");
        for (i, predicate) in where_clause.predicates.iter().enumerate() {
            if i != 0 {
                self.word_space(",");
            }
            match *predicate {
                // `for<'a> Ty: Bound` style predicate.
                ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
                    ref bound_generic_params,
                    ref bounded_ty,
                    ref bounds,
                    ..
                }) => {
                    self.print_formal_generic_params(bound_generic_params);
                    self.print_type(bounded_ty);
                    self.print_type_bounds(":", bounds);
                }
                // `'a: 'b` lifetime predicate.
                ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate {
                    ref lifetime,
                    ref bounds,
                    ..
                }) => {
                    self.print_lifetime_bounds(*lifetime, bounds);
                }
                // `Ty = Ty` equality predicate.
                ast::WherePredicate::EqPredicate(ast::WhereEqPredicate {
                    ref lhs_ty,
                    ref rhs_ty,
                    ..
                }) => {
                    self.print_type(lhs_ty);
                    self.s.space();
                    self.word_space("=");
                    self.print_type(rhs_ty);
                }
            }
        }
    }
    /// Prints the tree of a `use` item: a simple path (with optional
    /// `as rename`), a glob `path::*`, or a nested group `path::{...}`.
    crate fn print_use_tree(&mut self, tree: &ast::UseTree) {
        match tree.kind {
            ast::UseTreeKind::Simple(rename, ..) => {
                self.print_path(&tree.prefix, false, 0);
                if let Some(rename) = rename {
                    self.s.space();
                    self.word_space("as");
                    self.print_ident(rename);
                }
            }
            ast::UseTreeKind::Glob => {
                if !tree.prefix.segments.is_empty() {
                    self.print_path(&tree.prefix, false, 0);
                    self.s.word("::");
                }
                self.s.word("*");
            }
            ast::UseTreeKind::Nested(ref items) => {
                if tree.prefix.segments.is_empty() {
                    self.s.word("{");
                } else {
                    self.print_path(&tree.prefix, false, 0);
                    self.s.word("::{");
                }
                self.commasep(Inconsistent, &items[..], |this, &(ref tree, _)| {
                    this.print_use_tree(tree)
                });
                self.s.word("}");
            }
        }
    }
pub fn print_mutability(&mut self, mutbl: ast::Mutability, print_const: bool) {
match mutbl {
ast::Mutability::Mut => self.word_nbsp("mut"),
ast::Mutability::Not => {
if print_const {
self.word_nbsp("const");
}
}
}
}
    /// Prints a mutability-qualified type node (`mut`/`const` then the type),
    /// as found in pointer and reference types.
    crate fn print_mt(&mut self, mt: &ast::MutTy, print_const: bool) {
        self.print_mutability(mt.mutbl, print_const);
        self.print_type(&mt.ty)
    }
    /// Prints one function/closure parameter: a `self` receiver, a bare
    /// pattern for inferred closure params, or `pat: ty`.
    crate fn print_param(&mut self, input: &ast::Param, is_closure: bool) {
        self.ibox(INDENT_UNIT);
        self.print_outer_attributes_inline(&input.attrs);
        match input.ty.kind {
            // Closure params with an inferred type print just the pattern.
            ast::TyKind::Infer if is_closure => self.print_pat(&input.pat),
            _ => {
                if let Some(eself) = input.to_self() {
                    self.print_explicit_self(&eself);
                } else {
                    // An empty-keyword ident means the pattern was elided
                    // (e.g. in fn-pointer types); print only the type then.
                    let invalid = if let PatKind::Ident(_, ident, _) = input.pat.kind {
                        ident.name == kw::Empty
                    } else {
                        false
                    };
                    if !invalid {
                        self.print_pat(&input.pat);
                        self.s.word(":");
                        self.s.space();
                    }
                    self.print_type(&input.ty);
                }
            }
        }
        self.end();
    }
    /// Prints a function return type as `-> T`. A defaulted (implicit unit)
    /// return type prints nothing at all.
    crate fn print_fn_ret_ty(&mut self, fn_ret_ty: &ast::FnRetTy) {
        if let ast::FnRetTy::Ty(ty) = fn_ret_ty {
            self.space_if_not_bol();
            self.ibox(INDENT_UNIT);
            self.word_space("->");
            self.print_type(ty);
            self.end();
            self.maybe_print_comment(ty.span.lo());
        }
    }
    /// Prints a function-pointer type such as `for<'a> unsafe extern "C" fn(..)`.
    /// `generic_params` holds the higher-ranked lifetimes introduced with `for`.
    crate fn print_ty_fn(
        &mut self,
        ext: ast::Extern,
        unsafety: ast::Unsafe,
        decl: &ast::FnDecl,
        name: Option<Ident>,
        generic_params: &[ast::GenericParam],
    ) {
        self.ibox(INDENT_UNIT);
        if !generic_params.is_empty() {
            self.s.word("for");
            self.print_generic_params(generic_params);
        }
        // Fn-pointer types carry no real generics of their own; a dummy empty
        // `Generics` is synthesized so the shared `print_fn` path can be reused.
        let generics = ast::Generics {
            params: Vec::new(),
            where_clause: ast::WhereClause {
                has_where_token: false,
                predicates: Vec::new(),
                span: rustc_span::DUMMY_SP,
            },
            span: rustc_span::DUMMY_SP,
        };
        let header = ast::FnHeader { unsafety, ext, ..ast::FnHeader::default() };
        self.print_fn(decl, header, name, &generics);
        self.end();
    }
    /// Prints the trailing comment (if any) attached between `span` and
    /// `next_pos` in the original source. No-op when the printer has no
    /// comment stream.
    crate fn maybe_print_trailing_comment(
        &mut self,
        span: rustc_span::Span,
        next_pos: Option<BytePos>,
    ) {
        if let Some(cmnts) = self.comments() {
            if let Some(cmnt) = cmnts.trailing_comment(span, next_pos) {
                self.print_comment(&cmnt);
            }
        }
    }
    /// Flushes any comments left in the comment stream at the end of printing.
    crate fn print_remaining_comments(&mut self) {
        // If there aren't any remaining comments, then we need to manually
        // make sure there is a line break at the end.
        if self.next_comment().is_none() {
            self.s.hardbreak();
        }
        while let Some(ref cmnt) = self.next_comment() {
            self.print_comment(cmnt);
        }
    }
    /// Prints the qualifiers of a function header in canonical order
    /// (`const async unsafe extern "abi"`) followed by the `fn` keyword.
    crate fn print_fn_header_info(&mut self, header: ast::FnHeader) {
        self.print_constness(header.constness);
        self.print_asyncness(header.asyncness);
        self.print_unsafety(header.unsafety);
        match header.ext {
            ast::Extern::None => {}
            // `extern` with no explicit ABI string.
            ast::Extern::Implicit => {
                self.word_nbsp("extern");
            }
            // `extern "abi"` — the ABI is printed as a string literal.
            ast::Extern::Explicit(abi) => {
                self.word_nbsp("extern");
                self.print_literal(&abi.as_lit());
                self.nbsp();
            }
        }
        self.s.word("fn")
    }
crate fn print_unsafety(&mut self, s: ast::Unsafe) {
match s {
ast::Unsafe::No => {}
ast::Unsafe::Yes(_) => self.word_nbsp("unsafe"),
}
}
crate fn print_constness(&mut self, s: ast::Const) {
match s {
ast::Const::No => {}
ast::Const::Yes(_) => self.word_nbsp("const"),
}
}
crate fn print_is_auto(&mut self, s: ast::IsAuto) {
match s {
ast::IsAuto::Yes => self.word_nbsp("auto"),
ast::IsAuto::No => {}
}
}
}
| 35.853042 | 100 | 0.48165 |
ef01e9919eb99566516d78ebb38c75c033a0a9a2 | 2,588 | use crate::jcli_app::{
rest::Error,
utils::{io, DebugFlag, HostAddr, OutputFormat, RestApiSender},
};
use chain_core::property::Deserialize;
use chain_impl_mockchain::fragment::Fragment;
use std::path::PathBuf;
use structopt::StructOpt;
extern crate bytes;
use self::bytes::IntoBuf;
use chain_core::property::Fragment as fragment_property;
// Subcommands of the REST `message` command. NOTE: the `///` doc comments on
// the variants/fields below double as the structopt-generated CLI help text,
// i.e. they are user-visible runtime strings — edit with care.
#[derive(StructOpt)]
#[structopt(rename_all = "kebab-case")]
pub enum Message {
    /// Post message. Prints id for posted message
    Post {
        // Target node address (flattened common REST options).
        #[structopt(flatten)]
        addr: HostAddr,
        // Verbose request/response debugging (flattened common flag).
        #[structopt(flatten)]
        debug: DebugFlag,
        /// File containing hex-encoded message.
        /// If not provided, message will be read from stdin.
        #[structopt(short, long)]
        file: Option<PathBuf>,
    },
    /// get the node's logs on the message pool. This will provide information
    /// on pending transaction, rejected transaction and or when a transaction
    /// has been added in a block
    Logs {
        #[structopt(flatten)]
        addr: HostAddr,
        #[structopt(flatten)]
        debug: DebugFlag,
        // How to render the returned JSON (flattened common option).
        #[structopt(flatten)]
        output_format: OutputFormat,
    },
}
impl Message {
    /// Dispatches the parsed `message` subcommand to its implementation.
    pub fn exec(self) -> Result<(), Error> {
        match self {
            Message::Post { addr, debug, file } => post_message(file, addr, debug),
            Message::Logs {
                addr,
                debug,
                output_format,
            } => get_logs(addr, debug, output_format),
        }
    }
}
/// Fetches the node's fragment-pool logs from `GET /v0/fragment/logs` and
/// prints them to stdout in the requested output format.
fn get_logs(addr: HostAddr, debug: DebugFlag, output_format: OutputFormat) -> Result<(), Error> {
    let endpoint = addr.with_segments(&["v0", "fragment", "logs"])?.into_url();
    let request = reqwest::Client::new().get(endpoint);
    let response = RestApiSender::new(request, &debug).send()?;
    response.ok_response()?;
    let logs = response.body().json_value()?;
    println!("{}", output_format.format_json(logs)?);
    Ok(())
}
/// Reads a hex-encoded fragment (from `file`, or stdin when `file` is None),
/// posts the raw bytes to `POST /v0/message`, and prints the fragment id once
/// the node accepts it.
fn post_message(file: Option<PathBuf>, addr: HostAddr, debug: DebugFlag) -> Result<(), Error> {
    let hex_input = io::read_line(&file)?;
    let raw_bytes = hex::decode(&hex_input)?;
    let endpoint = addr.with_segments(&["v0", "message"])?.into_url();
    let request = reqwest::Client::new().post(endpoint);
    // Deserialize locally before sending: this rejects malformed input up
    // front and yields the fragment id that is printed on success.
    let fragment = Fragment::deserialize(raw_bytes.as_slice().into_buf())
        .map_err(Error::InputFragmentMalformed)?;
    let response = RestApiSender::new(request, &debug)
        .with_binary_body(raw_bytes)
        .send()?;
    response.ok_response()?;
    println!("{}", fragment.id());
    Ok(())
}
| 32.759494 | 97 | 0.620556 |
76f48d87159ce3cbbcd9dc41243fc301acc107d7 | 32,434 | // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::single_match_else,
clippy::unseparated_literal_suffix,
clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
use bazel_protos;
use clap;
use dirs;
use errno;
use fs;
use fuse;
use futures_timer;
use libc;
use serverset;
use time;
use futures::future::Future;
use hashing::{Digest, Fingerprint};
use log::{debug, error, warn};
use parking_lot::Mutex;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::HashMap;
use std::ffi::{CString, OsStr, OsString};
use std::path::Path;
use std::sync::Arc;
// Zero TTL: the kernel should not cache attributes/entries from this FS.
const TTL: time::Timespec = time::Timespec { sec: 0, nsec: 0 };
// Fixed creation/modification timestamp used for every synthetic file.
const CREATE_TIME: time::Timespec = time::Timespec { sec: 1, nsec: 0 };
/// Builds the `FileAttr` reported for every directory in this filesystem:
/// zero size, fixed timestamps, and read+execute permission for all users.
fn dir_attr_for(inode: Inode) -> fuse::FileAttr {
  // BUGFIX: the permission was previously written as hex `0x555`, which is
  // 0o2525 and sets nonsensical mode bits. File attrs elsewhere in this file
  // correctly use octal (`0o555`/`0o444`); directories now match: r-xr-xr-x.
  attr_for(inode, 0, fuse::FileType::Directory, 0o555)
}
/// Builds a `FileAttr` with the fixed timestamps used throughout this
/// read-only filesystem. `perm` is the Unix mode bits (octal, e.g. 0o444).
fn attr_for(inode: Inode, size: u64, kind: fuse::FileType, perm: u16) -> fuse::FileAttr {
  fuse::FileAttr {
    ino: inode,
    size: size,
    // TODO: Find out whether blocks is actaully important
    blocks: 0,
    atime: CREATE_TIME,
    mtime: CREATE_TIME,
    ctime: CREATE_TIME,
    crtime: CREATE_TIME,
    kind: kind,
    perm: perm,
    // Content is immutable and owned by root with a single link.
    nlink: 1,
    uid: 0,
    gid: 0,
    rdev: 0,
    flags: 0,
  }
}
/// Parses a `<hex-fingerprint>-<size-in-bytes>` path component (as used under
/// `/digest` and `/directory`) into a `Digest`.
pub fn digest_from_filepath(str: &str) -> Result<Digest, String> {
  // Shared error for both "missing part" cases. The closure captures only a
  // Copy reference, so it can be handed to `ok_or_else` more than once.
  let malformed = || format!("Invalid digest: {} wasn't of form fingerprint-size", str);
  let mut pieces = str.split('-');
  let hex_part = pieces.next().ok_or_else(malformed)?;
  let fingerprint = Fingerprint::from_hex_string(hex_part)?;
  let size_part = pieces.next().ok_or_else(malformed)?;
  let size_bytes: usize = size_part
    .parse()
    .map_err(|err| format!("Invalid digest; size {} not a number: {}", str, err))?;
  Ok(Digest(fingerprint, size_bytes))
}
// FUSE inode numbers. Inodes 1-3 are fixed (see the layout comment above the
// `fuse::Filesystem` impl); everything else is allocated on demand.
type Inode = u64;
const ROOT: Inode = 1;
const DIGEST_ROOT: Inode = 2;
const DIRECTORY_ROOT: Inode = 3;
// Whether an inode represents file content or a Directory proto.
#[derive(Clone, Copy, Debug)]
enum EntryType {
  File,
  Directory,
}

// Everything needed to serve getattr/read/readdir for an allocated inode.
#[derive(Clone, Copy, Debug)]
struct InodeDetails {
  digest: Digest,
  entry_type: EntryType,
  is_executable: bool,
}

// One row of a readdir listing.
#[derive(Debug)]
struct ReaddirEntry {
  inode: Inode,
  kind: fuse::FileType,
  name: OsString,
}

// A named child of a Directory proto, as returned by `node_for_digest`.
enum Node {
  Directory(bazel_protos::remote_execution::DirectoryNode),
  File(bazel_protos::remote_execution::FileNode),
}
// The filesystem state: a content-addressed Store plus bidirectional caches
// mapping digests to the inodes allocated for them.
struct BuildResultFS {
  store: fs::Store,
  // inode -> details needed to serve it.
  inode_digest_cache: HashMap<Inode, InodeDetails>,
  // file digest -> (executable inode, non-executable inode); the same content
  // gets two inodes so both permission variants can be exposed.
  digest_inode_cache: HashMap<Digest, (Inode, Inode)>,
  // directory digest -> inode.
  directory_inode_cache: HashMap<Digest, Inode>,
  // Next inode to hand out; 1-3 are reserved for the fixed roots.
  next_inode: Inode,
}
impl BuildResultFS {
  // Creates an empty filesystem over `store`. Inode allocation starts at 4,
  // just past the reserved ROOT/DIGEST_ROOT/DIRECTORY_ROOT inodes.
  pub fn new(store: fs::Store) -> BuildResultFS {
    BuildResultFS {
      store: store,
      inode_digest_cache: HashMap::new(),
      digest_inode_cache: HashMap::new(),
      directory_inode_cache: HashMap::new(),
      next_inode: 4,
    }
  }
}
impl BuildResultFS {
  // Looks up a direct child of `directory` by name, checking files before
  // subdirectories. Returns None when no entry has that name.
  pub fn node_for_digest(
    &mut self,
    directory: &bazel_protos::remote_execution::Directory,
    filename: &str,
  ) -> Option<Node> {
    for file in directory.get_files() {
      if file.get_name() == filename {
        return Some(Node::File(file.clone()));
      }
    }
    for child in directory.get_directories() {
      if child.get_name() == filename {
        return Some(Node::Directory(child.clone()));
      }
    }
    None
  }

  // Returns (allocating and caching on first use) the inode for the file with
  // the given digest and executability. Ok(None) means the digest is not
  // present in the Store; Err carries a Store error.
  pub fn inode_for_file(
    &mut self,
    digest: Digest,
    is_executable: bool,
  ) -> Result<Option<Inode>, String> {
    match self.digest_inode_cache.entry(digest) {
      Occupied(entry) => {
        let (executable_inode, non_executable_inode) = *entry.get();
        Ok(Some(if is_executable {
          executable_inode
        } else {
          non_executable_inode
        }))
      }
      // Cache miss: verify the content exists (the `|_| ()` callback discards
      // the bytes — this is purely an existence check), then allocate a pair
      // of inodes so both permission variants are addressable.
      Vacant(entry) => match self.store.load_file_bytes_with(digest, |_| ()).wait() {
        Ok(Some(())) => {
          let executable_inode = self.next_inode;
          self.next_inode += 1;
          let non_executable_inode = self.next_inode;
          self.next_inode += 1;
          entry.insert((executable_inode, non_executable_inode));
          self.inode_digest_cache.insert(
            executable_inode,
            InodeDetails {
              digest: digest,
              entry_type: EntryType::File,
              is_executable: true,
            },
          );
          self.inode_digest_cache.insert(
            non_executable_inode,
            InodeDetails {
              digest: digest,
              entry_type: EntryType::File,
              is_executable: false,
            },
          );
          Ok(Some(if is_executable {
            executable_inode
          } else {
            non_executable_inode
          }))
        }
        Ok(None) => Ok(None),
        Err(err) => Err(err),
      },
    }
  }

  // Returns (allocating and caching on first use) the inode for the Directory
  // proto with the given digest. Ok(None) means the Store has no such
  // Directory.
  pub fn inode_for_directory(&mut self, digest: Digest) -> Result<Option<Inode>, String> {
    match self.directory_inode_cache.entry(digest) {
      Occupied(entry) => Ok(Some(*entry.get())),
      Vacant(entry) => match self.store.load_directory(digest).wait() {
        Ok(Some(_)) => {
          // TODO: Kick off some background futures to pre-load the contents of this Directory into
          // an in-memory cache. Keep a background CPU pool driving those Futures.
          let inode = self.next_inode;
          self.next_inode += 1;
          entry.insert(inode);
          self.inode_digest_cache.insert(
            inode,
            InodeDetails {
              digest: digest,
              entry_type: EntryType::Directory,
              is_executable: true,
            },
          );
          Ok(Some(inode))
        }
        Ok(None) => Ok(None),
        Err(err) => Err(err),
      },
    }
  }

  // Builds the FileAttr for an already-allocated file inode; None if the
  // inode is unknown. Executable files get r-x, others r--.
  pub fn file_attr_for(&mut self, inode: Inode) -> Option<fuse::FileAttr> {
    self.inode_digest_cache.get(&inode).map(|f| {
      attr_for(
        inode,
        f.digest.1 as u64,
        fuse::FileType::RegularFile,
        if f.is_executable { 0o555 } else { 0o444 },
      )
    })
  }

  // Builds the FileAttr for the directory with the given digest, allocating
  // an inode on demand. Errors are mapped to errno values for FUSE replies.
  pub fn dir_attr_for(&mut self, digest: Digest) -> Result<fuse::FileAttr, i32> {
    match self.inode_for_directory(digest) {
      Ok(Some(inode)) => Ok(dir_attr_for(inode)),
      Ok(None) => Err(libc::ENOENT),
      Err(err) => {
        error!("Error getting directory for digest {:?}: {}", digest, err);
        Err(libc::EINVAL)
      }
    }
  }

  // Produces the full readdir listing for `inode`, allocating inodes for any
  // children that haven't been seen yet. Errors are errno values.
  pub fn readdir_entries(&mut self, inode: Inode) -> Result<Vec<ReaddirEntry>, i32> {
    match inode {
      ROOT => Ok(vec![
        ReaddirEntry {
          inode: ROOT,
          kind: fuse::FileType::Directory,
          name: OsString::from("."),
        },
        ReaddirEntry {
          inode: ROOT,
          kind: fuse::FileType::Directory,
          name: OsString::from(".."),
        },
        ReaddirEntry {
          inode: DIGEST_ROOT,
          kind: fuse::FileType::Directory,
          name: OsString::from("digest"),
        },
        ReaddirEntry {
          inode: DIRECTORY_ROOT,
          kind: fuse::FileType::Directory,
          name: OsString::from("directory"),
        },
      ]),
      // readdir on /digest or /directory will return an empty set.
      // readdir on /directory/abc123... will properly list the contents of that Directory.
      //
      // We skip directory listing for the roots because they will just be very long lists of
      // digests. The only other behaviours we could reasonably use are:
      //  1. Enumerate the entire contents of the local Store (which will be large), ignoring the
      //     remote Store (so the directory listing will still be incomplete - stuff which can be
      //     getattr'd/open'd will still not be present in the directory listing).
      //  2. Store a cache of requests we've successfully served, and claim that the directory
      //     contains exactly those files/directories.
      // All three of these end up with the same problem that readdir doesn't show things which, if
      // you were to getattr/open would actually exist. So we choose the cheapest, and most
      // consistent one: readdir is always empty.
      DIGEST_ROOT | DIRECTORY_ROOT => Ok(vec![]),
      inode => match self.inode_digest_cache.get(&inode) {
        Some(&InodeDetails {
          digest,
          entry_type: EntryType::Directory,
          ..
        }) => {
          let maybe_directory = self.store.load_directory(digest).wait();
          match maybe_directory {
            Ok(Some(directory)) => {
              let mut entries = vec![
                ReaddirEntry {
                  inode: inode,
                  kind: fuse::FileType::Directory,
                  name: OsString::from("."),
                },
                // NOTE(review): `..` always points at DIRECTORY_ROOT — the
                // true parent isn't tracked, so nested dirs get an inexact
                // parent link.
                ReaddirEntry {
                  inode: DIRECTORY_ROOT,
                  kind: fuse::FileType::Directory,
                  name: OsString::from(".."),
                },
              ];

              let directories = directory.get_directories().iter().map(|directory| {
                (
                  directory.get_digest(),
                  directory.get_name(),
                  fuse::FileType::Directory,
                  true,
                )
              });
              let files = directory.get_files().iter().map(|file| {
                (
                  file.get_digest(),
                  file.get_name(),
                  fuse::FileType::RegularFile,
                  file.get_is_executable(),
                )
              });

              // Allocate (or re-use) an inode for every child so the entries
              // returned here can be lookup'd/read immediately afterwards.
              for (digest, name, filetype, is_executable) in directories.chain(files) {
                let child_digest_result: Result<Digest, String> = digest.into();
                let child_digest = child_digest_result.map_err(|err| {
                  error!("Error parsing digest: {:?}", err);
                  libc::ENOENT
                })?;
                let maybe_child_inode = match filetype {
                  fuse::FileType::Directory => self.inode_for_directory(child_digest),
                  fuse::FileType::RegularFile => self.inode_for_file(child_digest, is_executable),
                  _ => unreachable!(),
                };
                match maybe_child_inode {
                  Ok(Some(child_inode)) => {
                    entries.push(ReaddirEntry {
                      inode: child_inode,
                      kind: filetype,
                      name: OsString::from(name),
                    });
                  }
                  Ok(None) => {
                    return Err(libc::ENOENT);
                  }
                  Err(err) => {
                    error!("Error reading child directory {:?}: {}", child_digest, err);
                    return Err(libc::EINVAL);
                  }
                }
              }

              Ok(entries)
            }
            Ok(None) => Err(libc::ENOENT),
            Err(err) => {
              error!("Error loading directory {:?}: {}", digest, err);
              Err(libc::EINVAL)
            }
          }
        }
        _ => Err(libc::ENOENT),
      },
    }
  }
}
// inodes:
// 1: /
// 2: /digest
// 3: /directory
// ... created on demand and cached for the lifetime of the program.
impl fuse::Filesystem for BuildResultFS {
  // Used to answer stat calls.
  //
  // Resolves `name` under `parent`. Children of /digest and /directory are
  // digest-shaped filenames; deeper lookups walk the parent's Directory proto.
  fn lookup(
    &mut self,
    _req: &fuse::Request<'_>,
    parent: Inode,
    name: &OsStr,
    reply: fuse::ReplyEntry,
  ) {
    let r = match (parent, name.to_str()) {
      (ROOT, Some("digest")) => Ok(dir_attr_for(DIGEST_ROOT)),
      (ROOT, Some("directory")) => Ok(dir_attr_for(DIRECTORY_ROOT)),
      // /digest/<fingerprint>-<size>: a file lookup. Files reached this way
      // are exposed as executable (`true`).
      (DIGEST_ROOT, Some(digest_str)) => match digest_from_filepath(digest_str) {
        Ok(digest) => self
          .inode_for_file(digest, true)
          .map_err(|err| {
            error!("Error loading file by digest {}: {}", digest_str, err);
            libc::EINVAL
          })
          .and_then(|maybe_inode| {
            maybe_inode
              .and_then(|inode| self.file_attr_for(inode))
              .ok_or(libc::ENOENT)
          }),
        Err(err) => {
          warn!("Invalid digest for file in digest root: {}", err);
          Err(libc::ENOENT)
        }
      },
      // /directory/<fingerprint>-<size>: a Directory proto lookup.
      (DIRECTORY_ROOT, Some(digest_str)) => match digest_from_filepath(digest_str) {
        Ok(digest) => self.dir_attr_for(digest),
        Err(err) => {
          warn!("Invalid digest for directroy in directory root: {}", err);
          Err(libc::ENOENT)
        }
      },
      // Anything deeper: load the parent's Directory proto and search its
      // children by name.
      (parent, Some(filename)) => {
        let maybe_cache_entry = self
          .inode_digest_cache
          .get(&parent)
          .cloned()
          .ok_or(libc::ENOENT);
        maybe_cache_entry
          .and_then(|cache_entry| {
            let parent_digest = cache_entry.digest;
            self
              .store
              .load_directory(parent_digest)
              .wait()
              .map_err(|err| {
                error!("Error reading directory {:?}: {}", parent_digest, err);
                libc::EINVAL
              })?
              .and_then(|directory| self.node_for_digest(&directory, filename))
              .ok_or(libc::ENOENT)
          })
          .and_then(|node| match node {
            Node::Directory(directory_node) => {
              let digest_result: Result<Digest, String> = directory_node.get_digest().into();
              let digest = digest_result.map_err(|err| {
                error!("Error parsing digest: {:?}", err);
                libc::ENOENT
              })?;
              self.dir_attr_for(digest)
            }
            Node::File(file_node) => {
              let digest_result: Result<Digest, String> = file_node.get_digest().into();
              let digest = digest_result.map_err(|err| {
                error!("Error parsing digest: {:?}", err);
                libc::ENOENT
              })?;
              self
                .inode_for_file(digest, file_node.get_is_executable())
                .map_err(|err| {
                  error!("Error loading file by digest {}: {}", filename, err);
                  libc::EINVAL
                })
                .and_then(|maybe_inode| {
                  maybe_inode
                    .and_then(|inode| self.file_attr_for(inode))
                    .ok_or(libc::ENOENT)
                })
            }
          })
      }
      _ => Err(libc::ENOENT),
    };
    match r {
      Ok(r) => reply.entry(&TTL, &r, 1),
      Err(err) => reply.error(err),
    }
  }

  // Serves stat for the fixed roots directly; everything else comes from the
  // inode cache.
  fn getattr(&mut self, _req: &fuse::Request<'_>, inode: Inode, reply: fuse::ReplyAttr) {
    match inode {
      ROOT => reply.attr(&TTL, &dir_attr_for(ROOT)),
      DIGEST_ROOT => reply.attr(&TTL, &dir_attr_for(DIGEST_ROOT)),
      DIRECTORY_ROOT => reply.attr(&TTL, &dir_attr_for(DIRECTORY_ROOT)),
      _ => match self.inode_digest_cache.get(&inode) {
        Some(&InodeDetails {
          entry_type: EntryType::File,
          ..
        }) => match self.file_attr_for(inode) {
          Some(file_attr) => reply.attr(&TTL, &file_attr),
          None => reply.error(libc::ENOENT),
        },
        Some(&InodeDetails {
          entry_type: EntryType::Directory,
          ..
        }) => reply.attr(&TTL, &dir_attr_for(inode)),
        _ => reply.error(libc::ENOENT),
      },
    }
  }

  // TODO: Find out whether fh is ever passed if open isn't explicitly implemented (and whether offset is ever negative)
  //
  // Serves a ranged read of a file inode. The reply is wrapped in
  // Arc<Mutex<Option<..>>> because exactly one of the three closures below
  // (data callback, not-found branch, error branch) must consume it.
  fn read(
    &mut self,
    _req: &fuse::Request<'_>,
    inode: Inode,
    _fh: u64,
    offset: i64,
    size: u32,
    reply: fuse::ReplyData,
  ) {
    match self.inode_digest_cache.get(&inode) {
      Some(&InodeDetails {
        digest,
        entry_type: EntryType::File,
        ..
      }) => {
        let reply = Arc::new(Mutex::new(Some(reply)));
        let reply2 = reply.clone();
        // TODO: Read from a cache of Futures driven from a CPU pool, so we can merge in-flight
        // requests, rather than reading from the store directly here.
        let result: Result<(), ()> = self
          .store
          .load_file_bytes_with(digest, move |bytes| {
            // Clamp the requested range to the actual content length.
            let begin = std::cmp::min(offset as usize, bytes.len());
            let end = std::cmp::min(offset as usize + size as usize, bytes.len());
            let mut reply = reply.lock();
            reply.take().unwrap().data(&bytes.slice(begin, end));
          })
          .map(|v| {
            if v.is_none() {
              let maybe_reply = reply2.lock().take();
              if let Some(reply) = maybe_reply {
                reply.error(libc::ENOENT);
              }
            }
          })
          .or_else(|err| {
            error!("Error loading bytes for {:?}: {}", digest, err);
            let maybe_reply = reply2.lock().take();
            if let Some(reply) = maybe_reply {
              reply.error(libc::EINVAL);
            }
            Ok(())
          })
          .wait();
        result.expect("Error from read future which should have been handled in the future ");
      }
      _ => reply.error(libc::ENOENT),
    }
  }

  // Streams readdir_entries into the kernel-sized reply buffer, resuming
  // from `offset` on subsequent calls.
  fn readdir(
    &mut self,
    _req: &fuse::Request<'_>,
    inode: Inode,
    // TODO: Find out whether fh is ever passed if open isn't explicitly implemented (and whether offset is ever negative)
    _fh: u64,
    offset: i64,
    mut reply: fuse::ReplyDirectory,
  ) {
    match self.readdir_entries(inode) {
      Ok(entries) => {
        // 0 is a magic offset which means no offset, whereas a non-zero offset means start
        // _after_ that entry. Inconsistency is fun.
        let to_skip = if offset == 0 { 0 } else { offset + 1 } as usize;
        let mut i = offset;
        for entry in entries.into_iter().skip(to_skip) {
          if reply.add(entry.inode, i, entry.kind, entry.name) {
            // Buffer is full, don't add more entries.
            break;
          }
          i += 1;
        }
        reply.ok();
      }
      Err(err) => reply.error(err),
    }
  }

  // If this isn't implemented, OSX will try to manipulate ._ files to manage xattrs out of band, which adds both overhead and logspam.
  fn listxattr(
    &mut self,
    _req: &fuse::Request<'_>,
    _inode: Inode,
    _size: u32,
    reply: fuse::ReplyXattr,
  ) {
    reply.size(0);
  }
}
/// Mounts a read-only BuildResultFS over `store` at `mount_path` on a
/// background thread, returning the session handle (unmounts on drop).
pub fn mount<'a, P: AsRef<Path>>(
  mount_path: P,
  store: fs::Store,
) -> std::io::Result<fuse::BackgroundSession<'a>> {
  // TODO: Work out how to disable caching in the filesystem
  let options = ["-o", "ro", "-o", "fsname=brfs", "-o", "noapplexattr"]
    .iter()
    .map(|o| o.as_ref())
    .collect::<Vec<&OsStr>>();

  debug!("About to spawn_mount with options {:?}", options);

  let fs = unsafe { fuse::spawn_mount(BuildResultFS::new(store), &mount_path, &options) };
  // fuse::spawn_mount doesn't always fully initialise the filesystem before returning.
  // Bluntly sleep for a bit here. If this poses a problem, we should maybe start doing some polling
  // stats or something until the filesystem seems to be correct.
  std::thread::sleep(std::time::Duration::from_secs(1));
  debug!("Did spawn mount");
  fs
}
// Entry point: parse CLI flags, build a local (and optionally remote) Store,
// mount it, and block forever — the process is terminated externally.
fn main() {
  let default_store_path = dirs::home_dir()
    .expect("Couldn't find homedir")
    .join(".cache")
    .join("pants")
    .join("lmdb_store");

  let args = clap::App::new("brfs")
    .arg(
      clap::Arg::with_name("local-store-path")
        .takes_value(true)
        .long("local-store-path")
        .default_value_os(default_store_path.as_ref())
        .required(false),
    ).arg(
      clap::Arg::with_name("server-address")
        .takes_value(true)
        .long("server-address")
        .required(false),
    ).arg(
      clap::Arg::with_name("remote-instance-name")
        .takes_value(true)
        .long("remote-instance-name")
        .required(false),
    ).arg(
      clap::Arg::with_name("root-ca-cert-file")
        .help("Path to file containing root certificate authority certificates. If not set, TLS will not be used when connecting to the remote.")
        .takes_value(true)
        .long("root-ca-cert-file")
        .required(false)
    ).arg(clap::Arg::with_name("oauth-bearer-token-file")
        .help("Path to file containing oauth bearer token. If not set, no authorization will be provided to remote servers.")
        .takes_value(true)
        .long("oauth-bearer-token-file")
        .required(false)
    ).arg(
      clap::Arg::with_name("mount-path")
        .required(true)
        .takes_value(true),
    ).get_matches();

  let mount_path = args.value_of("mount-path").unwrap();
  let store_path = args.value_of("local-store-path").unwrap();

  // Unmount whatever happens to be mounted there already.
  // This is handy for development, but should probably be removed :)
  let unmount_return = unmount(mount_path);
  if unmount_return != 0 {
    match errno::errno() {
      // EINVAL: the path wasn't a mount point — nothing to clean up.
      errno::Errno(22) => {
        debug!("unmount failed, continuing because error code suggests directory was not mounted")
      }
      v => panic!("Error unmounting: {:?}", v),
    }
  }

  let root_ca_certs = if let Some(path) = args.value_of("root-ca-cert-file") {
    Some(std::fs::read(path).expect("Error reading root CA certs file"))
  } else {
    None
  };

  let oauth_bearer_token = if let Some(path) = args.value_of("oauth-bearer-token-file") {
    Some(std::fs::read_to_string(path).expect("Error reading oauth bearer token file"))
  } else {
    None
  };

  let pool = Arc::new(fs::ResettablePool::new("brfs-".to_owned()));
  // Remote-backed Store when --server-address is given; purely local otherwise.
  let store = match args.value_of("server-address") {
    Some(address) => fs::Store::with_remote(
      &store_path,
      pool,
      &[address.to_owned()],
      args.value_of("remote-instance-name").map(str::to_owned),
      &root_ca_certs,
      oauth_bearer_token,
      1,
      4 * 1024 * 1024,
      std::time::Duration::from_secs(5 * 60),
      // TODO: Take a command line arg.
      serverset::BackoffConfig::new(
        std::time::Duration::from_secs(1),
        1.2,
        std::time::Duration::from_secs(20),
      )
      .expect("Error making BackoffConfig"),
      1,
      futures_timer::TimerHandle::default(),
    ),
    None => fs::Store::local_only(&store_path, pool),
  }
  .expect("Error making store");

  let _fs = mount(mount_path, store).expect("Error mounting");
  // Keep the mount alive until the process is killed.
  loop {
    std::thread::sleep(std::time::Duration::from_secs(1));
  }
}
// Unmounts `mount_path`, returning the raw libc return code (0 on success;
// on failure consult errno). macOS spells the syscall `unmount`.
#[cfg(target_os = "macos")]
fn unmount(mount_path: &str) -> i32 {
  unsafe {
    let path = CString::new(mount_path).unwrap();
    libc::unmount(path.as_ptr(), 0)
  }
}
// Linux counterpart of the macOS `unmount` above; the syscall is `umount`.
#[cfg(target_os = "linux")]
fn unmount(mount_path: &str) -> i32 {
  unsafe {
    let path = CString::new(mount_path).unwrap();
    libc::umount(path.as_ptr())
  }
}
// End-to-end tests: each one builds a local-only Store in a temp dir, mounts
// a BuildResultFS over it, and inspects the mounted tree through normal
// filesystem calls.
#[cfg(test)]
mod test {
  use tempfile;
  use testutil;

  use super::mount;
  use fs;
  use futures::future::Future;
  use hashing;
  use std::sync::Arc;
  use testutil::{
    data::{TestData, TestDirectory},
    file,
  };

  #[test]
  fn missing_digest() {
    let (store_dir, mount_dir) = make_dirs();

    let store = fs::Store::local_only(
      store_dir.path(),
      Arc::new(fs::ResettablePool::new("test-pool-".to_string())),
    )
    .expect("Error creating local store");

    let _fs = mount(mount_dir.path(), store).expect("Mounting");
    assert!(!&mount_dir
      .path()
      .join("digest")
      .join(digest_to_filepath(&TestData::roland().digest()))
      .exists());
  }

  #[test]
  fn read_file_by_digest() {
    let (store_dir, mount_dir) = make_dirs();

    let store = fs::Store::local_only(
      store_dir.path(),
      Arc::new(fs::ResettablePool::new("test-pool-".to_string())),
    )
    .expect("Error creating local store");

    let test_bytes = TestData::roland();

    store
      .store_file_bytes(test_bytes.bytes(), false)
      .wait()
      .expect("Storing bytes");

    let _fs = mount(mount_dir.path(), store).expect("Mounting");
    let file_path = mount_dir
      .path()
      .join("digest")
      .join(digest_to_filepath(&test_bytes.digest()));
    assert_eq!(test_bytes.bytes(), file::contents(&file_path));
    // Files under /digest are always exposed as executable.
    assert!(file::is_executable(&file_path));
  }

  #[test]
  fn list_directory() {
    let (store_dir, mount_dir) = make_dirs();

    let store = fs::Store::local_only(
      store_dir.path(),
      Arc::new(fs::ResettablePool::new("test-pool-".to_string())),
    )
    .expect("Error creating local store");

    let test_bytes = TestData::roland();
    let test_directory = TestDirectory::containing_roland();

    store
      .store_file_bytes(test_bytes.bytes(), false)
      .wait()
      .expect("Storing bytes");
    store
      .record_directory(&test_directory.directory(), false)
      .wait()
      .expect("Storing directory");

    let _fs = mount(mount_dir.path(), store).expect("Mounting");
    let virtual_dir = mount_dir
      .path()
      .join("directory")
      .join(digest_to_filepath(&test_directory.digest()));
    assert_eq!(vec!["roland"], file::list_dir(&virtual_dir));
  }

  #[test]
  fn read_file_from_directory() {
    let (store_dir, mount_dir) = make_dirs();

    let store = fs::Store::local_only(
      store_dir.path(),
      Arc::new(fs::ResettablePool::new("test-pool-".to_string())),
    )
    .expect("Error creating local store");

    let test_bytes = TestData::roland();
    let test_directory = TestDirectory::containing_roland();

    store
      .store_file_bytes(test_bytes.bytes(), false)
      .wait()
      .expect("Storing bytes");
    store
      .record_directory(&test_directory.directory(), false)
      .wait()
      .expect("Storing directory");

    let _fs = mount(mount_dir.path(), store).expect("Mounting");
    let roland = mount_dir
      .path()
      .join("directory")
      .join(digest_to_filepath(&test_directory.digest()))
      .join("roland");
    assert_eq!(test_bytes.bytes(), file::contents(&roland));
    assert!(!file::is_executable(&roland));
  }

  #[test]
  fn list_recursive_directory() {
    let (store_dir, mount_dir) = make_dirs();

    let store = fs::Store::local_only(
      store_dir.path(),
      Arc::new(fs::ResettablePool::new("test-pool-".to_string())),
    )
    .expect("Error creating local store");

    let test_bytes = TestData::roland();
    let treat_bytes = TestData::catnip();
    let test_directory = TestDirectory::containing_roland();
    let recursive_directory = TestDirectory::recursive();

    store
      .store_file_bytes(test_bytes.bytes(), false)
      .wait()
      .expect("Storing bytes");
    store
      .store_file_bytes(treat_bytes.bytes(), false)
      .wait()
      .expect("Storing bytes");
    store
      .record_directory(&test_directory.directory(), false)
      .wait()
      .expect("Storing directory");
    store
      .record_directory(&recursive_directory.directory(), false)
      .wait()
      .expect("Storing directory");

    let _fs = mount(mount_dir.path(), store).expect("Mounting");
    let virtual_dir = mount_dir
      .path()
      .join("directory")
      .join(digest_to_filepath(&recursive_directory.digest()));
    assert_eq!(vec!["cats", "treats"], file::list_dir(&virtual_dir));
    assert_eq!(vec!["roland"], file::list_dir(&virtual_dir.join("cats")));
  }

  #[test]
  fn read_file_from_recursive_directory() {
    let (store_dir, mount_dir) = make_dirs();

    let store = fs::Store::local_only(
      store_dir.path(),
      Arc::new(fs::ResettablePool::new("test-pool-".to_string())),
    )
    .expect("Error creating local store");

    let test_bytes = TestData::roland();
    let treat_bytes = TestData::catnip();
    let test_directory = TestDirectory::containing_roland();
    let recursive_directory = TestDirectory::recursive();

    store
      .store_file_bytes(test_bytes.bytes(), false)
      .wait()
      .expect("Storing bytes");
    store
      .store_file_bytes(treat_bytes.bytes(), false)
      .wait()
      .expect("Storing bytes");
    store
      .record_directory(&test_directory.directory(), false)
      .wait()
      .expect("Storing directory");
    store
      .record_directory(&recursive_directory.directory(), false)
      .wait()
      .expect("Storing directory");

    let _fs = mount(mount_dir.path(), store).expect("Mounting");
    let virtual_dir = mount_dir
      .path()
      .join("directory")
      .join(digest_to_filepath(&recursive_directory.digest()));
    let treats = virtual_dir.join("treats");
    assert_eq!(treat_bytes.bytes(), file::contents(&treats));
    assert!(!file::is_executable(&treats));

    let roland = virtual_dir.join("cats").join("roland");
    assert_eq!(test_bytes.bytes(), file::contents(&roland));
    assert!(!file::is_executable(&roland));
  }

  #[test]
  fn files_are_correctly_executable() {
    let (store_dir, mount_dir) = make_dirs();

    let store = fs::Store::local_only(
      store_dir.path(),
      Arc::new(fs::ResettablePool::new("test-pool-".to_string())),
    )
    .expect("Error creating local store");

    let treat_bytes = TestData::catnip();
    let directory = TestDirectory::with_mixed_executable_files();

    store
      .store_file_bytes(treat_bytes.bytes(), false)
      .wait()
      .expect("Storing bytes");
    store
      .record_directory(&directory.directory(), false)
      .wait()
      .expect("Storing directory");

    let _fs = mount(mount_dir.path(), store).expect("Mounting");
    let virtual_dir = mount_dir
      .path()
      .join("directory")
      .join(digest_to_filepath(&directory.digest()));
    assert_eq!(vec!["feed", "food"], file::list_dir(&virtual_dir));
    assert!(file::is_executable(&virtual_dir.join("feed")));
    assert!(!file::is_executable(&virtual_dir.join("food")));
  }

  // Renders a digest the way this FS names entries: "<fingerprint>-<size>".
  pub fn digest_to_filepath(digest: &hashing::Digest) -> String {
    format!("{}-{}", digest.0, digest.1)
  }

  // Fresh temp dirs for (store backing data, mount point).
  pub fn make_dirs() -> (tempfile::TempDir, tempfile::TempDir) {
    let store_dir = tempfile::Builder::new().prefix("store").tempdir().unwrap();
    let mount_dir = tempfile::Builder::new().prefix("mount").tempdir().unwrap();
    (store_dir, mount_dir)
  }
}
// TODO: Write a bunch more syscall-y tests to test that each syscall for each file/directory type
// acts as we expect.
// Lower-level tests that go through raw libc syscalls instead of std::fs, to
// exercise the FUSE layer the way real programs hit it.
#[cfg(test)]
mod syscall_tests {
  use super::mount;
  use super::test::digest_to_filepath;
  use crate::test::make_dirs;
  use fs;
  use futures::Future;
  use libc;
  use std::ffi::CString;
  use std::path::Path;
  use std::sync::Arc;
  use testutil::data::TestData;

  #[test]
  fn read_file_by_digest_exact_bytes() {
    let (store_dir, mount_dir) = make_dirs();

    let store = fs::Store::local_only(
      store_dir.path(),
      Arc::new(fs::ResettablePool::new("test-pool-".to_string())),
    )
    .expect("Error creating local store");

    let test_bytes = TestData::roland();

    store
      .store_file_bytes(test_bytes.bytes(), false)
      .wait()
      .expect("Storing bytes");

    let _fs = mount(mount_dir.path(), store).expect("Mounting");

    let path = mount_dir
      .path()
      .join("digest")
      .join(digest_to_filepath(&test_bytes.digest()));

    let mut buf = make_buffer(test_bytes.len());

    // open/read/close via libc; a single read must return exactly the
    // content length.
    unsafe {
      let fd = libc::open(path_to_cstring(&path).as_ptr(), 0);
      assert!(fd > 0, "Bad fd {}", fd);
      let read_bytes = libc::read(fd, buf.as_mut_ptr() as *mut libc::c_void, buf.len());
      assert_eq!(test_bytes.len() as isize, read_bytes);
      assert_eq!(0, libc::close(fd));
    }

    assert_eq!(test_bytes.string(), String::from_utf8(buf).unwrap());
  }

  fn path_to_cstring(path: &Path) -> CString {
    CString::new(path.to_string_lossy().as_bytes().to_owned()).unwrap()
  }

  // Zero-filled buffer of `size` bytes, used as the read destination.
  fn make_buffer(size: usize) -> Vec<u8> {
    let mut buf: Vec<u8> = Vec::new();
    buf.resize(size, 0);
    buf
  }
}
| 30.978032 | 214 | 0.585528 |
1a8dfbc2b37ab233976e4a60adada20e2d3666c6 | 2,042 | // This file is part of the SORA network and Polkaswap app.
// Copyright (c) 2020, 2021, Polka Biome Ltd. All rights reserved.
// SPDX-License-Identifier: BSD-4-Clause
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
// Redistributions of source code must retain the above copyright notice, this list
// of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright notice, this
// list of conditions and the following disclaimer in the documentation and/or other
// materials provided with the distribution.
//
// All advertising materials mentioning features or use of this software must display
// the following acknowledgement: This product includes software developed by Polka Biome
// Ltd., SORA, and Polkaswap.
//
// Neither the name of the Polka Biome Ltd. nor the names of its contributors may be used
// to endorse or promote products derived from this software without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY Polka Biome Ltd. AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Polka Biome Ltd. BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#![cfg_attr(not(feature = "std"), no_std)]
#[macro_use]
extern crate alloc;
use alloc::string::String;
// Declares the runtime API surface for the Iroha-migration pallet so that
// client-side code (e.g. RPC handlers) can call into the runtime.
sp_api::decl_runtime_apis! {
    /// Runtime API for querying Iroha account-migration state.
    pub trait IrohaMigrationAPI {
        /// Returns `true` when the Iroha account identified by
        /// `iroha_address` still needs migration.
        fn needs_migration(iroha_address: String) -> bool;
    }
}
| 48.619048 | 103 | 0.770813 |
9c3792afa2f1c91b5e87e234f1258028d2dc5f30 | 89,002 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! VecDeque is a double-ended queue, which is implemented with the help of a
//! growing ring buffer.
//!
//! This queue has `O(1)` amortized inserts and removals from both ends of the
//! container. It also has `O(1)` indexing like a vector. The contained elements
//! are not required to be copyable, and the queue will be sendable if the
//! contained type is sendable.
#![stable(feature = "rust1", since = "1.0.0")]
use core::cmp::Ordering;
use core::fmt;
use core::iter::{repeat, FromIterator};
use core::mem;
use core::ops::{Index, IndexMut};
use core::ptr;
use core::slice;
use core::hash::{Hash, Hasher};
use core::cmp;
use alloc::raw_vec::RawVec;
use super::range::RangeArgument;
use super::vec::Vec;
const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
const MINIMUM_CAPACITY: usize = 1; // 2 - 1
#[cfg(target_pointer_width = "32")]
const MAXIMUM_ZST_CAPACITY: usize = 1 << (32 - 1); // Largest possible power of two
#[cfg(target_pointer_width = "64")]
const MAXIMUM_ZST_CAPACITY: usize = 1 << (64 - 1); // Largest possible power of two
/// `VecDeque` is a growable ring buffer, which can be used as a double-ended
/// queue efficiently.
///
/// The "default" usage of this type as a queue is to use `push_back` to add to
/// the queue, and `pop_front` to remove from the queue. `extend` and `append`
/// push onto the back in this manner, and iterating over `VecDeque` goes front
/// to back.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct VecDeque<T> {
// tail and head are pointers into the buffer. Tail always points
// to the first element that could be read, Head always points
// to where data should be written.
// If tail == head the buffer is empty. The length of the ringbuffer
// is defined as the distance between the two.
tail: usize,
head: usize,
buf: RawVec<T>,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for VecDeque<T> {
    /// Clones the deque by cloning every element front-to-back into a
    /// freshly collected ring buffer.
    fn clone(&self) -> VecDeque<T> {
        let duplicated: VecDeque<T> = self.iter().map(Clone::clone).collect();
        duplicated
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for VecDeque<T> {
    #[unsafe_destructor_blind_to_params]
    fn drop(&mut self) {
        // The live elements may wrap around the ring buffer, so they are
        // exposed as (up to) two contiguous slices and dropped slice by slice.
        let (front, back) = self.as_mut_slices();
        unsafe {
            // use drop for [T]
            // SAFETY: both slices cover only initialized elements; the raw
            // allocation itself is released afterwards by RawVec's own Drop.
            ptr::drop_in_place(front);
            ptr::drop_in_place(back);
        }
        // RawVec handles deallocation
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for VecDeque<T> {
    /// Creates an empty deque; equivalent to `VecDeque::new()`.
    #[inline]
    fn default() -> VecDeque<T> {
        Self::new()
    }
}
// Internal ring-buffer plumbing. Invariants maintained throughout: cap() is
// a power of two, tail/head are in [0, cap()), and one slot is always left
// empty so that tail == head unambiguously means "empty".
impl<T> VecDeque<T> {
    /// Marginally more convenient
    #[inline]
    fn ptr(&self) -> *mut T {
        self.buf.ptr()
    }
    /// Marginally more convenient
    #[inline]
    fn cap(&self) -> usize {
        if mem::size_of::<T>() == 0 {
            // For zero sized types, we are always at maximum capacity
            MAXIMUM_ZST_CAPACITY
        } else {
            self.buf.cap()
        }
    }
    /// Turn ptr into a slice
    #[inline]
    unsafe fn buffer_as_slice(&self) -> &[T] {
        slice::from_raw_parts(self.ptr(), self.cap())
    }
    /// Turn ptr into a mut slice
    #[inline]
    unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
        slice::from_raw_parts_mut(self.ptr(), self.cap())
    }
    /// Moves an element out of the buffer
    #[inline]
    unsafe fn buffer_read(&mut self, off: usize) -> T {
        ptr::read(self.ptr().offset(off as isize))
    }
    /// Writes an element into the buffer, moving it.
    #[inline]
    unsafe fn buffer_write(&mut self, off: usize, value: T) {
        ptr::write(self.ptr().offset(off as isize), value);
    }
    /// Returns true if and only if the buffer is at capacity
    #[inline]
    fn is_full(&self) -> bool {
        // One slot is reserved empty, so "full" means len == cap - 1.
        self.cap() - self.len() == 1
    }
    /// Returns the index in the underlying buffer for a given logical element
    /// index.
    #[inline]
    fn wrap_index(&self, idx: usize) -> usize {
        wrap_index(idx, self.cap())
    }
    /// Returns the index in the underlying buffer for a given logical element
    /// index + addend.
    #[inline]
    fn wrap_add(&self, idx: usize, addend: usize) -> usize {
        // Wrapping arithmetic is fine: the result is masked by cap() anyway.
        wrap_index(idx.wrapping_add(addend), self.cap())
    }
    /// Returns the index in the underlying buffer for a given logical element
    /// index - subtrahend.
    #[inline]
    fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize {
        wrap_index(idx.wrapping_sub(subtrahend), self.cap())
    }
    /// Copies a contiguous block of memory len long from src to dst
    #[inline]
    unsafe fn copy(&self, dst: usize, src: usize, len: usize) {
        debug_assert!(dst + len <= self.cap(),
                      "cpy dst={} src={} len={} cap={}",
                      dst,
                      src,
                      len,
                      self.cap());
        debug_assert!(src + len <= self.cap(),
                      "cpy dst={} src={} len={} cap={}",
                      dst,
                      src,
                      len,
                      self.cap());
        // ptr::copy permits overlapping ranges (memmove semantics).
        ptr::copy(self.ptr().offset(src as isize),
                  self.ptr().offset(dst as isize),
                  len);
    }
    /// Copies a contiguous block of memory len long from src to dst
    #[inline]
    unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) {
        debug_assert!(dst + len <= self.cap(),
                      "cno dst={} src={} len={} cap={}",
                      dst,
                      src,
                      len,
                      self.cap());
        debug_assert!(src + len <= self.cap(),
                      "cno dst={} src={} len={} cap={}",
                      dst,
                      src,
                      len,
                      self.cap());
        // Caller must guarantee the ranges are disjoint (memcpy semantics).
        ptr::copy_nonoverlapping(self.ptr().offset(src as isize),
                                 self.ptr().offset(dst as isize),
                                 len);
    }
    /// Copies a potentially wrapping block of memory len long from src to dest.
    /// (abs(dst - src) + len) must be no larger than cap() (There must be at
    /// most one continuous overlapping region between src and dest).
    unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) {
        #[allow(dead_code)]
        fn diff(a: usize, b: usize) -> usize {
            if a <= b {
                b - a
            } else {
                a - b
            }
        }
        debug_assert!(cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
                      "wrc dst={} src={} len={} cap={}",
                      dst,
                      src,
                      len,
                      self.cap());
        if src == dst || len == 0 {
            return;
        }
        // Classify the move by which of the two ranges wrap around the end of
        // the buffer and by their relative order, then dispatch to the
        // matching sequence of (at most three) contiguous copies.
        let dst_after_src = self.wrap_sub(dst, src) < len;
        let src_pre_wrap_len = self.cap() - src;
        let dst_pre_wrap_len = self.cap() - dst;
        let src_wraps = src_pre_wrap_len < len;
        let dst_wraps = dst_pre_wrap_len < len;
        match (dst_after_src, src_wraps, dst_wraps) {
            (_, false, false) => {
                // src doesn't wrap, dst doesn't wrap
                //
                // S . . .
                // 1 [_ _ A A B B C C _]
                // 2 [_ _ A A A A B B _]
                // D . . .
                //
                self.copy(dst, src, len);
            }
            (false, false, true) => {
                // dst before src, src doesn't wrap, dst wraps
                //
                // S . . .
                // 1 [A A B B _ _ _ C C]
                // 2 [A A B B _ _ _ A A]
                // 3 [B B B B _ _ _ A A]
                // . . D .
                //
                self.copy(dst, src, dst_pre_wrap_len);
                self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
            }
            (true, false, true) => {
                // src before dst, src doesn't wrap, dst wraps
                //
                // S . . .
                // 1 [C C _ _ _ A A B B]
                // 2 [B B _ _ _ A A B B]
                // 3 [B B _ _ _ A A A A]
                // . . D .
                //
                self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
                self.copy(dst, src, dst_pre_wrap_len);
            }
            (false, true, false) => {
                // dst before src, src wraps, dst doesn't wrap
                //
                // . . S .
                // 1 [C C _ _ _ A A B B]
                // 2 [C C _ _ _ B B B B]
                // 3 [C C _ _ _ B B C C]
                // D . . .
                //
                self.copy(dst, src, src_pre_wrap_len);
                self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
            }
            (true, true, false) => {
                // src before dst, src wraps, dst doesn't wrap
                //
                // . . S .
                // 1 [A A B B _ _ _ C C]
                // 2 [A A A A _ _ _ C C]
                // 3 [C C A A _ _ _ C C]
                // D . . .
                //
                self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
                self.copy(dst, src, src_pre_wrap_len);
            }
            (false, true, true) => {
                // dst before src, src wraps, dst wraps
                //
                // . . . S .
                // 1 [A B C D _ E F G H]
                // 2 [A B C D _ E G H H]
                // 3 [A B C D _ E G H A]
                // 4 [B C C D _ E G H A]
                // . . D . .
                //
                debug_assert!(dst_pre_wrap_len > src_pre_wrap_len);
                let delta = dst_pre_wrap_len - src_pre_wrap_len;
                self.copy(dst, src, src_pre_wrap_len);
                self.copy(dst + src_pre_wrap_len, 0, delta);
                self.copy(0, delta, len - dst_pre_wrap_len);
            }
            (true, true, true) => {
                // src before dst, src wraps, dst wraps
                //
                // . . S . .
                // 1 [A B C D _ E F G H]
                // 2 [A A B D _ E F G H]
                // 3 [H A B D _ E F G H]
                // 4 [H A B D _ E F F G]
                // . . . D .
                //
                debug_assert!(src_pre_wrap_len > dst_pre_wrap_len);
                let delta = src_pre_wrap_len - dst_pre_wrap_len;
                self.copy(delta, 0, len - src_pre_wrap_len);
                self.copy(0, self.cap() - delta, delta);
                self.copy(dst, src, dst_pre_wrap_len);
            }
        }
    }
    /// Frobs the head and tail sections around to handle the fact that we
    /// just reallocated. Unsafe because it trusts old_cap.
    #[inline]
    unsafe fn handle_cap_increase(&mut self, old_cap: usize) {
        let new_cap = self.cap();
        // Move the shortest contiguous section of the ring buffer
        //    T             H
        //   [o o o o o o o . ]
        //    T             H
        // A [o o o o o o o . . . . . . . . . ]
        //        H T
        //   [o o . o o o o o ]
        //          T             H
        // B [. . . o o o o o o o . . . . . . ]
        //              H T
        //   [o o o o o . o o ]
        //              H                 T
        // C [o o o o o . . . . . . . . . o o ]
        if self.tail <= self.head {
            // A
            // Nop
        } else if self.head < old_cap - self.tail {
            // B
            self.copy_nonoverlapping(old_cap, 0, self.head);
            self.head += old_cap;
            debug_assert!(self.head > self.tail);
        } else {
            // C
            let new_tail = new_cap - (old_cap - self.tail);
            self.copy_nonoverlapping(new_tail, self.tail, old_cap - self.tail);
            self.tail = new_tail;
            debug_assert!(self.head < self.tail);
        }
        debug_assert!(self.head < self.cap());
        debug_assert!(self.tail < self.cap());
        debug_assert!(self.cap().count_ones() == 1);
    }
}
impl<T> VecDeque<T> {
/// Creates an empty `VecDeque`.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let vector: VecDeque<u32> = VecDeque::new();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> VecDeque<T> {
VecDeque::with_capacity(INITIAL_CAPACITY)
}
    /// Creates an empty `VecDeque` with space for at least `n` elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let vector: VecDeque<u32> = VecDeque::with_capacity(10);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn with_capacity(n: usize) -> VecDeque<T> {
        // +1 since the ringbuffer always leaves one space empty
        let cap = cmp::max(n + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
        // Guards against arithmetic overflow for very large `n`: if the
        // power-of-two computation wrapped, cap would not exceed n.
        assert!(cap > n, "capacity overflow");
        VecDeque {
            tail: 0,
            head: 0,
            buf: RawVec::with_capacity(cap),
        }
    }
/// Retrieves an element in the `VecDeque` by index.
///
/// Element at index 0 is the front of the queue.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// buf.push_back(3);
/// buf.push_back(4);
/// buf.push_back(5);
/// assert_eq!(buf.get(1), Some(&4));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self, index: usize) -> Option<&T> {
if index < self.len() {
let idx = self.wrap_add(self.tail, index);
unsafe { Some(&*self.ptr().offset(idx as isize)) }
} else {
None
}
}
/// Retrieves an element in the `VecDeque` mutably by index.
///
/// Element at index 0 is the front of the queue.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// buf.push_back(3);
/// buf.push_back(4);
/// buf.push_back(5);
/// if let Some(elem) = buf.get_mut(1) {
/// *elem = 7;
/// }
///
/// assert_eq!(buf[1], 7);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
if index < self.len() {
let idx = self.wrap_add(self.tail, index);
unsafe { Some(&mut *self.ptr().offset(idx as isize)) }
} else {
None
}
}
    /// Swaps elements at indices `i` and `j`.
    ///
    /// `i` and `j` may be equal.
    ///
    /// Fails if there is no element with either index.
    ///
    /// Element at index 0 is the front of the queue.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(3);
    /// buf.push_back(4);
    /// buf.push_back(5);
    /// buf.swap(0, 2);
    /// assert_eq!(buf[0], 5);
    /// assert_eq!(buf[2], 3);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn swap(&mut self, i: usize, j: usize) {
        assert!(i < self.len());
        assert!(j < self.len());
        // Translate logical indices into physical ring-buffer positions.
        let ri = self.wrap_add(self.tail, i);
        let rj = self.wrap_add(self.tail, j);
        unsafe {
            // SAFETY: both indices were bounds-checked above, so both
            // pointers address live slots (possibly the same one, which
            // ptr::swap permits).
            ptr::swap(self.ptr().offset(ri as isize),
                      self.ptr().offset(rj as isize))
        }
    }
/// Returns the number of elements the `VecDeque` can hold without
/// reallocating.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let buf: VecDeque<i32> = VecDeque::with_capacity(10);
/// assert!(buf.capacity() >= 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> usize {
self.cap() - 1
}
    /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
    /// given `VecDeque`. Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it requests. Therefore
    /// capacity can not be relied upon to be precisely minimal. Prefer `reserve` if future
    /// insertions are expected.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
    /// buf.reserve_exact(10);
    /// assert!(buf.capacity() >= 11);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve_exact(&mut self, additional: usize) {
        // The ring buffer's capacity is always a power of two, so an "exact"
        // reservation cannot do better than the regular growth strategy.
        self.reserve(additional);
    }
    /// Reserves capacity for at least `additional` more elements to be inserted in the given
    /// `VecDeque`. The collection may reserve more space to avoid frequent reallocations.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
    /// buf.reserve(10);
    /// assert!(buf.capacity() >= 11);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve(&mut self, additional: usize) {
        let old_cap = self.cap();
        // +1 for the permanently-empty slot that separates head from tail.
        let used_cap = self.len() + 1;
        // Round the needed capacity up to a power of two; overflow in either
        // step surfaces as a "capacity overflow" panic.
        let new_cap = used_cap.checked_add(additional)
            .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
            .expect("capacity overflow");
        if new_cap > self.capacity() {
            self.buf.reserve_exact(used_cap, new_cap - used_cap);
            unsafe {
                // Re-establish the ring-buffer invariants after the raw
                // allocation grew.
                self.handle_cap_increase(old_cap);
            }
        }
    }
    /// Shrinks the capacity of the `VecDeque` as much as possible.
    ///
    /// It will drop down as close as possible to the length but the allocator may still inform the
    /// `VecDeque` that there is space for a few more elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::with_capacity(15);
    /// buf.extend(0..4);
    /// assert_eq!(buf.capacity(), 15);
    /// buf.shrink_to_fit();
    /// assert!(buf.capacity() >= 4);
    /// ```
    #[stable(feature = "deque_extras_15", since = "1.5.0")]
    pub fn shrink_to_fit(&mut self) {
        // +1 since the ringbuffer always leaves one space empty
        // len + 1 can't overflow for an existing, well-formed ringbuffer.
        let target_cap = cmp::max(self.len() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
        if target_cap < self.cap() {
            // There are three cases of interest:
            //   All elements are out of desired bounds
            //   Elements are contiguous, and head is out of desired bounds
            //   Elements are discontiguous, and tail is out of desired bounds
            //
            // At all other times, element positions are unaffected.
            //
            // Indicates that elements at the head should be moved.
            let head_outside = self.head == 0 || self.head >= target_cap;
            // Move elements from out of desired bounds (positions after target_cap)
            if self.tail >= target_cap && head_outside {
                //                    T             H
                //   [. . . . . . . . o o o o o o o . ]
                //    T             H
                //   [o o o o o o o . ]
                unsafe {
                    self.copy_nonoverlapping(0, self.tail, self.len());
                }
                self.head = self.len();
                self.tail = 0;
            } else if self.tail != 0 && self.tail < target_cap && head_outside {
                //          T             H
                //   [. . . o o o o o o o . . . . . . ]
                //        H T
                //   [o o . o o o o o ]
                // Only the wrapped-around head section needs to move; it is
                // copied to the start of the shrunken buffer.
                let len = self.wrap_sub(self.head, target_cap);
                unsafe {
                    self.copy_nonoverlapping(0, target_cap, len);
                }
                self.head = len;
                debug_assert!(self.head < self.tail);
            } else if self.tail >= target_cap {
                //              H                 T
                //   [o o o o o . . . . . . . . . o o ]
                //              H T
                //   [o o o o o . o o ]
                debug_assert!(self.wrap_sub(self.head, 1) < target_cap);
                // Slide the tail section down so it ends exactly at the new
                // capacity boundary.
                let len = self.cap() - self.tail;
                let new_tail = target_cap - len;
                unsafe {
                    self.copy_nonoverlapping(new_tail, self.tail, len);
                }
                self.tail = new_tail;
                debug_assert!(self.head < self.tail);
            }
            self.buf.shrink_to_fit(target_cap);
            debug_assert!(self.head < self.cap());
            debug_assert!(self.tail < self.cap());
            debug_assert!(self.cap().count_ones() == 1);
        }
    }
/// Shortens a `VecDeque`, dropping excess elements from the back.
///
/// If `len` is greater than the `VecDeque`'s current length, this has no
/// effect.
///
/// # Examples
///
/// ```
/// #![feature(deque_extras)]
///
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// buf.push_back(5);
/// buf.push_back(10);
/// buf.push_back(15);
/// buf.truncate(1);
/// assert_eq!(buf.len(), 1);
/// assert_eq!(Some(&5), buf.get(0));
/// ```
#[unstable(feature = "deque_extras",
reason = "matches collection reform specification; waiting on panic semantics",
issue = "27788")]
pub fn truncate(&mut self, len: usize) {
for _ in len..self.len() {
self.pop_back();
}
}
/// Returns a front-to-back iterator.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// buf.push_back(5);
/// buf.push_back(3);
/// buf.push_back(4);
/// let b: &[_] = &[&5, &3, &4];
/// let c: Vec<&i32> = buf.iter().collect();
/// assert_eq!(&c[..], b);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> {
Iter {
tail: self.tail,
head: self.head,
ring: unsafe { self.buffer_as_slice() },
}
}
/// Returns a front-to-back iterator that returns mutable references.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// buf.push_back(5);
/// buf.push_back(3);
/// buf.push_back(4);
/// for num in buf.iter_mut() {
/// *num = *num - 2;
/// }
/// let b: &[_] = &[&mut 3, &mut 1, &mut 2];
/// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter_mut(&mut self) -> IterMut<T> {
IterMut {
tail: self.tail,
head: self.head,
ring: unsafe { self.buffer_as_mut_slice() },
}
}
    /// Returns a pair of slices which contain, in order, the contents of the
    /// `VecDeque`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut vector: VecDeque<u32> = VecDeque::new();
    ///
    /// vector.push_back(0);
    /// vector.push_back(1);
    /// vector.push_back(2);
    ///
    /// assert_eq!(vector.as_slices(), (&[0u32, 1, 2] as &[u32], &[] as &[u32]));
    ///
    /// vector.push_front(10);
    /// vector.push_front(9);
    ///
    /// assert_eq!(vector.as_slices(), (&[9u32, 10] as &[u32], &[0u32, 1, 2] as &[u32]));
    /// ```
    #[inline]
    #[stable(feature = "deque_extras_15", since = "1.5.0")]
    pub fn as_slices(&self) -> (&[T], &[T]) {
        unsafe {
            let contiguous = self.is_contiguous();
            let buf = self.buffer_as_slice();
            if contiguous {
                // split_at(0) manufactures an empty slice with the right
                // lifetime for the second half of the pair.
                let (empty, buf) = buf.split_at(0);
                (&buf[self.tail..self.head], empty)
            } else {
                // Wrapped layout: the run starting at tail comes first, the
                // run ending at head second.
                let (mid, right) = buf.split_at(self.tail);
                let (left, _) = mid.split_at(self.head);
                (right, left)
            }
        }
    }
    /// Returns a pair of slices which contain, in order, the contents of the
    /// `VecDeque`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut vector: VecDeque<u32> = VecDeque::new();
    ///
    /// vector.push_back(0);
    /// vector.push_back(1);
    ///
    /// vector.push_front(10);
    /// vector.push_front(9);
    ///
    /// vector.as_mut_slices().0[0] = 42;
    /// vector.as_mut_slices().1[0] = 24;
    /// assert_eq!(vector.as_slices(), (&[42u32, 10] as &[u32], &[24u32, 1] as &[u32]));
    /// ```
    #[inline]
    #[stable(feature = "deque_extras_15", since = "1.5.0")]
    pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
        unsafe {
            let contiguous = self.is_contiguous();
            // Copy the cursors out first: the mutable buffer view below
            // borrows `self` exclusively.
            let head = self.head;
            let tail = self.tail;
            let buf = self.buffer_as_mut_slice();
            if contiguous {
                // split_at_mut(0) yields an empty slice with a matching
                // lifetime for the second half of the pair.
                let (empty, buf) = buf.split_at_mut(0);
                (&mut buf[tail..head], empty)
            } else {
                // Wrapped layout: tail-run first, head-run second.
                let (mid, right) = buf.split_at_mut(tail);
                let (left, _) = mid.split_at_mut(head);
                (right, left)
            }
        }
    }
    /// Returns the number of elements in the `VecDeque`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut v = VecDeque::new();
    /// assert_eq!(v.len(), 0);
    /// v.push_back(1);
    /// assert_eq!(v.len(), 1);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn len(&self) -> usize {
        // Delegates to the module-level `count` helper, which computes the
        // wrapping distance from tail to head within the power-of-two cap.
        count(self.tail, self.head, self.cap())
    }
    /// Returns true if the buffer contains no elements
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut v = VecDeque::new();
    /// assert!(v.is_empty());
    /// v.push_front(1);
    /// assert!(!v.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_empty(&self) -> bool {
        // Equivalent to tail == head, expressed via the length computation.
        self.len() == 0
    }
    /// Create a draining iterator that removes the specified range in the
    /// `VecDeque` and yields the removed items.
    ///
    /// Note 1: The element range is removed even if the iterator is not
    /// consumed until the end.
    ///
    /// Note 2: It is unspecified how many elements are removed from the deque,
    /// if the `Drain` value is not dropped, but the borrow it holds expires
    /// (eg. due to mem::forget).
    ///
    /// # Panics
    ///
    /// Panics if the starting point is greater than the end point or if
    /// the end point is greater than the length of the vector.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
    /// assert_eq!(vec![3].into_iter().collect::<VecDeque<_>>(), v.drain(2..).collect());
    /// assert_eq!(vec![1, 2].into_iter().collect::<VecDeque<_>>(), v);
    ///
    /// // A full range clears all contents
    /// v.drain(..);
    /// assert!(v.is_empty());
    /// ```
    #[inline]
    #[stable(feature = "drain", since = "1.6.0")]
    pub fn drain<R>(&mut self, range: R) -> Drain<T>
        where R: RangeArgument<usize>
    {
        // Memory safety
        //
        // When the Drain is first created, the source deque is shortened to
        // make sure no uninitialized or moved-from elements are accessible at
        // all if the Drain's destructor never gets to run.
        //
        // Drain will ptr::read out the values to remove.
        // When finished, the remaining data will be copied back to cover the hole,
        // and the head/tail values will be restored correctly.
        //
        let len = self.len();
        // Open-ended bounds default to the full extent of the deque.
        let start = *range.start().unwrap_or(&0);
        let end = *range.end().unwrap_or(&len);
        assert!(start <= end, "drain lower bound was too large");
        assert!(end <= len, "drain upper bound was too large");
        // The deque's elements are parted into three segments:
        // * self.tail  -> drain_tail
        // * drain_tail -> drain_head
        // * drain_head -> self.head
        //
        // T = self.tail; H = self.head; t = drain_tail; h = drain_head
        //
        // We store drain_tail as self.head, and drain_head and self.head as
        // after_tail and after_head respectively on the Drain. This also
        // truncates the effective array such that if the Drain is leaked, we
        // have forgotten about the potentially moved values after the start of
        // the drain.
        //
        //        T   t   h   H
        // [. . . o o x x o o . . .]
        //
        let drain_tail = self.wrap_add(self.tail, start);
        let drain_head = self.wrap_add(self.tail, end);
        let head = self.head;
        // "forget" about the values after the start of the drain until after
        // the drain is complete and the Drain destructor is run.
        self.head = drain_tail;
        Drain {
            deque: self as *mut _,
            after_tail: drain_head,
            after_head: head,
            iter: Iter {
                tail: drain_tail,
                head: drain_head,
                ring: unsafe { self.buffer_as_mut_slice() },
            },
        }
    }
    /// Clears the buffer, removing all values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut v = VecDeque::new();
    /// v.push_back(1);
    /// v.clear();
    /// assert!(v.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn clear(&mut self) {
        // Drain (and drop) every element; the allocation is retained.
        self.drain(..);
    }
/// Returns `true` if the `VecDeque` contains an element equal to the
/// given value.
///
/// # Examples
///
/// ```
/// #![feature(vec_deque_contains)]
///
/// use std::collections::VecDeque;
///
/// let mut vector: VecDeque<u32> = VecDeque::new();
///
/// vector.push_back(0);
/// vector.push_back(1);
///
/// assert_eq!(vector.contains(&1), true);
/// assert_eq!(vector.contains(&10), false);
/// ```
#[unstable(feature = "vec_deque_contains", reason = "recently added",
issue = "32630")]
pub fn contains(&self, x: &T) -> bool
where T: PartialEq<T>
{
let (a, b) = self.as_slices();
a.contains(x) || b.contains(x)
}
/// Provides a reference to the front element, or `None` if the sequence is
/// empty.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut d = VecDeque::new();
/// assert_eq!(d.front(), None);
///
/// d.push_back(1);
/// d.push_back(2);
/// assert_eq!(d.front(), Some(&1));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front(&self) -> Option<&T> {
if !self.is_empty() {
Some(&self[0])
} else {
None
}
}
/// Provides a mutable reference to the front element, or `None` if the
/// sequence is empty.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut d = VecDeque::new();
/// assert_eq!(d.front_mut(), None);
///
/// d.push_back(1);
/// d.push_back(2);
/// match d.front_mut() {
/// Some(x) => *x = 9,
/// None => (),
/// }
/// assert_eq!(d.front(), Some(&9));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front_mut(&mut self) -> Option<&mut T> {
if !self.is_empty() {
Some(&mut self[0])
} else {
None
}
}
/// Provides a reference to the back element, or `None` if the sequence is
/// empty.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut d = VecDeque::new();
/// assert_eq!(d.back(), None);
///
/// d.push_back(1);
/// d.push_back(2);
/// assert_eq!(d.back(), Some(&2));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back(&self) -> Option<&T> {
if !self.is_empty() {
Some(&self[self.len() - 1])
} else {
None
}
}
/// Provides a mutable reference to the back element, or `None` if the
/// sequence is empty.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut d = VecDeque::new();
/// assert_eq!(d.back(), None);
///
/// d.push_back(1);
/// d.push_back(2);
/// match d.back_mut() {
/// Some(x) => *x = 9,
/// None => (),
/// }
/// assert_eq!(d.back(), Some(&9));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back_mut(&mut self) -> Option<&mut T> {
let len = self.len();
if !self.is_empty() {
Some(&mut self[len - 1])
} else {
None
}
}
/// Removes the first element and returns it, or `None` if the sequence is
/// empty.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut d = VecDeque::new();
/// d.push_back(1);
/// d.push_back(2);
///
/// assert_eq!(d.pop_front(), Some(1));
/// assert_eq!(d.pop_front(), Some(2));
/// assert_eq!(d.pop_front(), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pop_front(&mut self) -> Option<T> {
if self.is_empty() {
None
} else {
let tail = self.tail;
self.tail = self.wrap_add(self.tail, 1);
unsafe { Some(self.buffer_read(tail)) }
}
}
    /// Inserts an element first in the sequence.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut d = VecDeque::new();
    /// d.push_front(1);
    /// d.push_front(2);
    /// assert_eq!(d.front(), Some(&2));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn push_front(&mut self, value: T) {
        if self.is_full() {
            let old_cap = self.cap();
            // Grow by doubling, then untangle any wrapped-around section so
            // the ring-buffer invariants hold for the new capacity.
            self.buf.double();
            unsafe {
                self.handle_cap_increase(old_cap);
            }
            debug_assert!(!self.is_full());
        }
        // Step tail backwards (wrapping) and write into the freed slot.
        self.tail = self.wrap_sub(self.tail, 1);
        let tail = self.tail;
        unsafe {
            self.buffer_write(tail, value);
        }
    }
    /// Appends an element to the back of a buffer
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(1);
    /// buf.push_back(3);
    /// assert_eq!(3, *buf.back().unwrap());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn push_back(&mut self, value: T) {
        if self.is_full() {
            let old_cap = self.cap();
            // Grow by doubling, then restore the ring-buffer invariants.
            self.buf.double();
            unsafe {
                self.handle_cap_increase(old_cap);
            }
            debug_assert!(!self.is_full());
        }
        // Write into the slot at head, then advance head (wrapping).
        let head = self.head;
        self.head = self.wrap_add(self.head, 1);
        unsafe { self.buffer_write(head, value) }
    }
/// Removes the last element from a buffer and returns it, or `None` if
/// it is empty.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// assert_eq!(buf.pop_back(), None);
/// buf.push_back(1);
/// buf.push_back(3);
/// assert_eq!(buf.pop_back(), Some(3));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pop_back(&mut self) -> Option<T> {
if self.is_empty() {
None
} else {
self.head = self.wrap_sub(self.head, 1);
let head = self.head;
unsafe { Some(self.buffer_read(head)) }
}
}
    #[inline]
    fn is_contiguous(&self) -> bool {
        // The live elements form a single unbroken run exactly when head has
        // not wrapped around behind tail.
        self.tail <= self.head
    }
/// Removes an element from anywhere in the `VecDeque` and returns it, replacing it with the
/// last element.
///
/// This does not preserve ordering, but is O(1).
///
/// Returns `None` if `index` is out of bounds.
///
/// Element at index 0 is the front of the queue.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// assert_eq!(buf.swap_remove_back(0), None);
/// buf.push_back(1);
/// buf.push_back(2);
/// buf.push_back(3);
///
/// assert_eq!(buf.swap_remove_back(0), Some(1));
/// assert_eq!(buf.len(), 2);
/// assert_eq!(buf[0], 3);
/// assert_eq!(buf[1], 2);
/// ```
#[stable(feature = "deque_extras_15", since = "1.5.0")]
pub fn swap_remove_back(&mut self, index: usize) -> Option<T> {
let length = self.len();
if length > 0 && index < length - 1 {
self.swap(index, length - 1);
} else if index >= length {
return None;
}
self.pop_back()
}
/// Removes an element from anywhere in the `VecDeque` and returns it,
/// replacing it with the first element.
///
/// This does not preserve ordering, but is O(1).
///
/// Returns `None` if `index` is out of bounds.
///
/// Element at index 0 is the front of the queue.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// assert_eq!(buf.swap_remove_front(0), None);
/// buf.push_back(1);
/// buf.push_back(2);
/// buf.push_back(3);
///
/// assert_eq!(buf.swap_remove_front(2), Some(3));
/// assert_eq!(buf.len(), 2);
/// assert_eq!(buf[0], 2);
/// assert_eq!(buf[1], 1);
/// ```
#[stable(feature = "deque_extras_15", since = "1.5.0")]
pub fn swap_remove_front(&mut self, index: usize) -> Option<T> {
let length = self.len();
if length > 0 && index < length && index != 0 {
self.swap(index, 0);
} else if index >= length {
return None;
}
self.pop_front()
}
    /// Inserts an element at `index` within the `VecDeque`. Whichever
    /// end is closer to the insertion point will be moved to make room,
    /// and all the affected elements will be moved to new positions.
    ///
    /// Element at index 0 is the front of the queue.
    ///
    /// # Panics
    ///
    /// Panics if `index` is greater than `VecDeque`'s length
    ///
    /// # Examples
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(10);
    /// buf.push_back(12);
    /// buf.insert(1, 11);
    /// assert_eq!(Some(&11), buf.get(1));
    /// ```
    #[stable(feature = "deque_extras_15", since = "1.5.0")]
    pub fn insert(&mut self, index: usize, value: T) {
        assert!(index <= self.len(), "index out of bounds");
        // Grow (doubling) first so a free slot is guaranteed to exist for
        // the new element; the ring always keeps at least one empty slot.
        if self.is_full() {
            let old_cap = self.cap();
            self.buf.double();
            unsafe {
                self.handle_cap_increase(old_cap);
            }
            debug_assert!(!self.is_full());
        }
        // Move the least number of elements in the ring buffer and insert
        // the given object
        //
        // At most len/2 - 1 elements will be moved. O(min(n, n-i))
        //
        // There are three main cases:
        //  Elements are contiguous
        //      - special case when tail is 0
        //  Elements are discontiguous and the insert is in the tail section
        //  Elements are discontiguous and the insert is in the head section
        //
        // For each of those there are two more cases:
        //  Insert is closer to tail
        //  Insert is closer to head
        //
        // Key: H - self.head
        //      T - self.tail
        //      o - Valid element
        //      I - Insertion element
        //      A - The element that should be after the insertion point
        //      M - Indicates element was moved
        let idx = self.wrap_add(self.tail, index);
        let distance_to_tail = index;
        let distance_to_head = self.len() - index;
        let contiguous = self.is_contiguous();
        match (contiguous,
               distance_to_tail <= distance_to_head,
               idx >= self.tail) {
            (true, true, _) if index == 0 => {
                // push_front
                //
                //       T
                //       I             H
                //      [A o o o o o o . . . . . . . . .]
                //
                //                       H         T
                //      [A o o o o o o o . . . . . I]
                //
                self.tail = self.wrap_sub(self.tail, 1);
            }
            (true, true, _) => {
                unsafe {
                    // contiguous, insert closer to tail:
                    //
                    //             T   I         H
                    //      [. . . o o A o o o o . . . . . .]
                    //
                    //           T               H
                    //      [. . o o I A o o o o . . . . . .]
                    //           M M
                    //
                    // contiguous, insert closer to tail and tail is 0:
                    //
                    //
                    //       T   I         H
                    //      [o o A o o o o . . . . . . . . .]
                    //
                    //                       H             T
                    //      [o I A o o o o o . . . . . . . o]
                    //       M                             M
                    let new_tail = self.wrap_sub(self.tail, 1);
                    self.copy(new_tail, self.tail, 1);
                    // Already moved the tail, so we only copy `index - 1` elements.
                    self.copy(self.tail, self.tail + 1, index - 1);
                    self.tail = new_tail;
                }
            }
            (true, false, _) => {
                unsafe {
                    //  contiguous, insert closer to head:
                    //
                    //             T       I     H
                    //      [. . . o o o o A o o . . . . . .]
                    //
                    //             T               H
                    //      [. . . o o o o I A o o . . . . .]
                    //                       M M M
                    self.copy(idx + 1, idx, self.head - idx);
                    self.head = self.wrap_add(self.head, 1);
                }
            }
            (false, true, true) => {
                unsafe {
                    // discontiguous, insert closer to tail, tail section:
                    //
                    //                   H         T   I
                    //      [o o o o o o . . . . . o o A o o]
                    //
                    //                   H       T
                    //      [o o o o o o . . . . o o I A o o]
                    //                           M M
                    self.copy(self.tail - 1, self.tail, index);
                    self.tail -= 1;
                }
            }
            (false, false, true) => {
                unsafe {
                    // discontiguous, insert closer to head, tail section:
                    //
                    //           H             T         I
                    //      [o o . . . . . . . o o o o o A o]
                    //
                    //             H           T
                    //      [o o o . . . . . . o o o o o I A]
                    //       M M M                         M
                    // copy elements up to new head
                    self.copy(1, 0, self.head);
                    // copy last element into empty spot at bottom of buffer
                    self.copy(0, self.cap() - 1, 1);
                    // move elements from idx to end forward not including ^ element
                    self.copy(idx + 1, idx, self.cap() - 1 - idx);
                    self.head += 1;
                }
            }
            (false, true, false) if idx == 0 => {
                unsafe {
                    // discontiguous, insert is closer to tail, head section,
                    // and is at index zero in the internal buffer:
                    //
                    //       I                   H     T
                    //      [A o o o o o o o o o . . . o o o]
                    //
                    //                           H   T
                    //      [A o o o o o o o o o . . o o o I]
                    //                               M M M
                    // copy elements up to new tail
                    self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
                    // copy last element into empty spot at bottom of buffer
                    self.copy(self.cap() - 1, 0, 1);
                    self.tail -= 1;
                }
            }
            (false, true, false) => {
                unsafe {
                    // discontiguous, insert closer to tail, head section:
                    //
                    //             I             H     T
                    //      [o o o A o o o o o o . . . o o o]
                    //
                    //                           H   T
                    //      [o o I A o o o o o o . . o o o o]
                    //       M M                     M M M M
                    // copy elements up to new tail
                    self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
                    // copy last element into empty spot at bottom of buffer
                    self.copy(self.cap() - 1, 0, 1);
                    // move elements from idx-1 to end forward not including ^ element
                    self.copy(0, 1, idx - 1);
                    self.tail -= 1;
                }
            }
            (false, false, false) => {
                unsafe {
                    // discontiguous, insert closer to head, head section:
                    //
                    //               I     H           T
                    //      [o o o o A o o . . . . . . o o o]
                    //
                    //                     H           T
                    //      [o o o o I A o o . . . . . o o o]
                    //                 M M M
                    self.copy(idx + 1, idx, self.head - idx);
                    self.head += 1;
                }
            }
        }
        // tail might've been changed so we need to recalculate
        let new_idx = self.wrap_add(self.tail, index);
        // The copies above vacated exactly the slot at `new_idx`, so
        // writing into it cannot overwrite a live element.
        unsafe {
            self.buffer_write(new_idx, value);
        }
    }
/// Removes and returns the element at `index` from the `VecDeque`.
/// Whichever end is closer to the removal point will be moved to make
/// room, and all the affected elements will be moved to new positions.
/// Returns `None` if `index` is out of bounds.
///
/// Element at index 0 is the front of the queue.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// buf.push_back(1);
/// buf.push_back(2);
/// buf.push_back(3);
///
/// assert_eq!(buf.remove(1), Some(2));
/// assert_eq!(buf.get(1), Some(&3));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(&mut self, index: usize) -> Option<T> {
if self.is_empty() || self.len() <= index {
return None;
}
// There are three main cases:
// Elements are contiguous
// Elements are discontiguous and the removal is in the tail section
// Elements are discontiguous and the removal is in the head section
// - special case when elements are technically contiguous,
// but self.head = 0
//
// For each of those there are two more cases:
// Insert is closer to tail
// Insert is closer to head
//
// Key: H - self.head
// T - self.tail
// o - Valid element
// x - Element marked for removal
// R - Indicates element that is being removed
// M - Indicates element was moved
let idx = self.wrap_add(self.tail, index);
let elem = unsafe { Some(self.buffer_read(idx)) };
let distance_to_tail = index;
let distance_to_head = self.len() - index;
let contiguous = self.is_contiguous();
match (contiguous,
distance_to_tail <= distance_to_head,
idx >= self.tail) {
(true, true, _) => {
unsafe {
// contiguous, remove closer to tail:
//
// T R H
// [. . . o o x o o o o . . . . . .]
//
// T H
// [. . . . o o o o o o . . . . . .]
// M M
self.copy(self.tail + 1, self.tail, index);
self.tail += 1;
}
}
(true, false, _) => {
unsafe {
// contiguous, remove closer to head:
//
// T R H
// [. . . o o o o x o o . . . . . .]
//
// T H
// [. . . o o o o o o . . . . . . .]
// M M
self.copy(idx, idx + 1, self.head - idx - 1);
self.head -= 1;
}
}
(false, true, true) => {
unsafe {
// discontiguous, remove closer to tail, tail section:
//
// H T R
// [o o o o o o . . . . . o o x o o]
//
// H T
// [o o o o o o . . . . . . o o o o]
// M M
self.copy(self.tail + 1, self.tail, index);
self.tail = self.wrap_add(self.tail, 1);
}
}
(false, false, false) => {
unsafe {
// discontiguous, remove closer to head, head section:
//
// R H T
// [o o o o x o o . . . . . . o o o]
//
// H T
// [o o o o o o . . . . . . . o o o]
// M M
self.copy(idx, idx + 1, self.head - idx - 1);
self.head -= 1;
}
}
(false, false, true) => {
unsafe {
// discontiguous, remove closer to head, tail section:
//
// H T R
// [o o o . . . . . . o o o o o x o]
//
// H T
// [o o . . . . . . . o o o o o o o]
// M M M M
//
// or quasi-discontiguous, remove next to head, tail section:
//
// H T R
// [. . . . . . . . . o o o o o x o]
//
// T H
// [. . . . . . . . . o o o o o o .]
// M
// draw in elements in the tail section
self.copy(idx, idx + 1, self.cap() - idx - 1);
// Prevents underflow.
if self.head != 0 {
// copy first element into empty spot
self.copy(self.cap() - 1, 0, 1);
// move elements in the head section backwards
self.copy(0, 1, self.head - 1);
}
self.head = self.wrap_sub(self.head, 1);
}
}
(false, true, false) => {
unsafe {
// discontiguous, remove closer to tail, head section:
//
// R H T
// [o o x o o o o o o o . . . o o o]
//
// H T
// [o o o o o o o o o o . . . . o o]
// M M M M M
// draw in elements up to idx
self.copy(1, 0, idx);
// copy last element into empty spot
self.copy(0, self.cap() - 1, 1);
// move elements from tail to end forward, excluding the last one
self.copy(self.tail + 1, self.tail, self.cap() - self.tail - 1);
self.tail = self.wrap_add(self.tail, 1);
}
}
}
return elem;
}
    /// Splits the collection into two at the given index.
    ///
    /// Returns a newly allocated `Self`. `self` contains elements `[0, at)`,
    /// and the returned `Self` contains elements `[at, len)`.
    ///
    /// Note that the capacity of `self` does not change.
    ///
    /// Element at index 0 is the front of the queue.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let mut buf: VecDeque<_> = vec![1,2,3].into_iter().collect();
    /// let buf2 = buf.split_off(1);
    /// // buf = [1], buf2 = [2, 3]
    /// assert_eq!(buf.len(), 1);
    /// assert_eq!(buf2.len(), 2);
    /// ```
    #[inline]
    #[stable(feature = "split_off", since = "1.4.0")]
    pub fn split_off(&mut self, at: usize) -> Self {
        let len = self.len();
        assert!(at <= len, "`at` out of bounds");
        let other_len = len - at;
        let mut other = VecDeque::with_capacity(other_len);
        // `other`'s buffer is brand new (fully vacant), so the copies below
        // cannot overlap with the source and `copy_nonoverlapping` is safe.
        // After the copy, the moved elements logically belong to `other`;
        // adjusting `self.head` below is what "removes" them from `self`.
        unsafe {
            let (first_half, second_half) = self.as_slices();
            let first_len = first_half.len();
            let second_len = second_half.len();
            if at < first_len {
                // `at` lies in the first half.
                let amount_in_first = first_len - at;
                ptr::copy_nonoverlapping(first_half.as_ptr().offset(at as isize),
                                         other.ptr(),
                                         amount_in_first);
                // just take all of the second half.
                ptr::copy_nonoverlapping(second_half.as_ptr(),
                                         other.ptr().offset(amount_in_first as isize),
                                         second_len);
            } else {
                // `at` lies in the second half, need to factor in the elements we skipped
                // in the first half.
                let offset = at - first_len;
                let amount_in_second = second_len - offset;
                ptr::copy_nonoverlapping(second_half.as_ptr().offset(offset as isize),
                                         other.ptr(),
                                         amount_in_second);
            }
        }
        // Cleanup where the ends of the buffers are
        // `self` shrinks by `other_len` from the back; `other` starts at
        // slot 0, so its head is simply its length (wrapped to capacity).
        self.head = self.wrap_sub(self.head, other_len);
        other.head = other.wrap_index(other_len);
        other
    }
/// Moves all the elements of `other` into `Self`, leaving `other` empty.
///
/// # Panics
///
/// Panics if the new number of elements in self overflows a `usize`.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
/// let mut buf2: VecDeque<_> = vec![4, 5, 6].into_iter().collect();
/// buf.append(&mut buf2);
/// assert_eq!(buf.len(), 6);
/// assert_eq!(buf2.len(), 0);
/// ```
#[inline]
#[stable(feature = "append", since = "1.4.0")]
pub fn append(&mut self, other: &mut Self) {
// naive impl
self.extend(other.drain(..));
}
/// Retains only the elements specified by the predicate.
///
/// In other words, remove all elements `e` such that `f(&e)` returns false.
/// This method operates in place and preserves the order of the retained
/// elements.
///
/// # Examples
///
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
/// buf.extend(1..5);
/// buf.retain(|&x| x%2 == 0);
///
/// let v: Vec<_> = buf.into_iter().collect();
/// assert_eq!(&v[..], &[2, 4]);
/// ```
#[stable(feature = "vec_deque_retain", since = "1.4.0")]
pub fn retain<F>(&mut self, mut f: F)
where F: FnMut(&T) -> bool
{
let len = self.len();
let mut del = 0;
for i in 0..len {
if !f(&self[i]) {
del += 1;
} else if del > 0 {
self.swap(i - del, i);
}
}
if del > 0 {
self.truncate(len - del);
}
}
}
impl<T: Clone> VecDeque<T> {
    /// Adjusts the `VecDeque` in place so that `len()` equals `new_len`:
    /// shrinking truncates from the back, growing appends clones of
    /// `value` to the back.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(deque_extras)]
    ///
    /// use std::collections::VecDeque;
    ///
    /// let mut buf = VecDeque::new();
    /// buf.push_back(5);
    /// buf.push_back(10);
    /// buf.push_back(15);
    /// buf.resize(2, 0);
    /// buf.resize(6, 20);
    /// for (a, b) in [5, 10, 20, 20, 20, 20].iter().zip(&buf) {
    ///     assert_eq!(a, b);
    /// }
    /// ```
    #[unstable(feature = "deque_extras",
               reason = "matches collection reform specification; waiting on panic semantics",
               issue = "27788")]
    pub fn resize(&mut self, new_len: usize, value: T) {
        let len = self.len();
        if new_len <= len {
            self.truncate(new_len);
        } else {
            let extra = new_len - len;
            self.extend(repeat(value).take(extra));
        }
    }
}
/// Returns the index in the underlying buffer for a given logical element
/// index.
///
/// `size` must be a power of two, which lets the modulo reduction be done
/// with a single bit-mask.
#[inline]
fn wrap_index(index: usize, size: usize) -> usize {
    debug_assert!(size.is_power_of_two());
    let mask = size - 1;
    index & mask
}
/// Calculate the number of elements left to be read in the buffer
///
/// `size` is the ring capacity and must be a power of two, so the
/// wrap-around subtraction can be reduced with a bit-mask.
#[inline]
fn count(tail: usize, head: usize, size: usize) -> usize {
    // size is always a power of 2; assert it in debug builds for
    // consistency with `wrap_index`, which checks the same invariant.
    debug_assert!(size.is_power_of_two());
    (head.wrapping_sub(tail)) & (size - 1)
}
/// `VecDeque` iterator.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    // The full backing buffer; `wrap_index` below assumes its length is a
    // power of two (the ring capacity).
    ring: &'a [T],
    // Buffer index of the next element to yield from the front.
    tail: usize,
    // Buffer index one past the last element to yield from the back.
    head: usize,
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Clone for Iter<'a, T> {
    fn clone(&self) -> Iter<'a, T> {
        // All fields (`&[T]` and two `usize`s) are `Copy`, so a field-wise
        // copy via functional record update is all that's needed.
        Iter { ..*self }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<&'a T> {
        if self.tail != self.head {
            let current = self.tail;
            // Advance the front cursor, wrapping around the ring.
            self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
            // Relies on the deque invariant that `tail < ring.len()`, so
            // the unchecked access is in bounds.
            unsafe { Some(self.ring.get_unchecked(current)) }
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // The number of remaining elements is known exactly.
        let remaining = count(self.tail, self.head, self.ring.len());
        (remaining, Some(remaining))
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a T> {
        if self.tail == self.head {
            return None;
        }
        // Step the back cursor down one slot (wrapping) and yield the
        // element it now points at.
        let new_head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
        self.head = new_head;
        unsafe { Some(self.ring.get_unchecked(new_head)) }
    }
}
// `size_hint` returns the exact remaining length, so this impl is sound.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
/// `VecDeque` mutable iterator.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> {
    // The full backing buffer, borrowed mutably for the iterator's lifetime.
    ring: &'a mut [T],
    // Buffer index of the next element to yield from the front.
    tail: usize,
    // Buffer index one past the last element to yield from the back.
    head: usize,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<&'a mut T> {
        if self.tail == self.head {
            return None;
        }
        let tail = self.tail;
        self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
        unsafe {
            let elem = self.ring.get_unchecked_mut(tail);
            // Go through a raw pointer to detach the returned `&'a mut T`
            // from the shorter reborrow of `self.ring`. This is sound
            // because each buffer index is yielded at most once (the
            // cursors only ever move towards each other), so no two
            // returned mutable references alias.
            Some(&mut *(elem as *mut _))
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact count of remaining elements.
        let len = count(self.tail, self.head, self.ring.len());
        (len, Some(len))
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a mut T> {
        if self.tail == self.head {
            return None;
        }
        self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
        unsafe {
            let elem = self.ring.get_unchecked_mut(self.head);
            // Same lifetime-extension trick as `next`: sound because each
            // index is handed out at most once across both ends.
            Some(&mut *(elem as *mut _))
        }
    }
}
// `size_hint` returns the exact remaining length, so this impl is sound.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
/// A by-value VecDeque iterator
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<T> {
    // Owns the deque outright; iteration simply pops elements off it.
    inner: VecDeque<T>,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        // Front-to-back traversal: each step removes the current front
        // element of the owned deque.
        self.inner.pop_front()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // The deque knows its exact length.
        let remaining = self.inner.len();
        (remaining, Some(remaining))
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        // Back-to-front traversal simply pops from the back of the deque.
        self.inner.pop_back()
    }
}
// `size_hint` reports the exact length, so this impl is sound.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {}
/// A draining VecDeque iterator
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a, T: 'a> {
    // Tail position of the region that follows the drained range
    // (used by `Drop` to stitch the deque back together).
    after_tail: usize,
    // Head position of the region that follows the drained range.
    after_head: usize,
    // Iterator over the elements being drained out.
    iter: Iter<'a, T>,
    // Raw back-pointer to the source deque; dereferenced in `Drop`.
    deque: *mut VecDeque<T>,
}
// The raw `deque` pointer defeats the auto traits, but `Drain` behaves
// like an exclusive borrow of the deque, so `Send`/`Sync` are inherited
// under the same bounds a `&mut VecDeque<T>` would require.
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<'a, T: Send> Send for Drain<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: 'a> Drop for Drain<'a, T> {
    fn drop(&mut self) {
        // Exhaust the iterator first so every element in the drained
        // range is read out and dropped.
        for _ in self.by_ref() {}
        let source_deque = unsafe { &mut *self.deque };
        // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
        //
        //        T   t   h   H
        // [. . . o o x x o o . . .]
        //
        // NOTE(review): at this point `source_deque.head` holds the start
        // of the drained range (presumably set when the `Drain` was
        // created), hence the `drain_tail` name — confirm against the
        // `drain` constructor.
        let orig_tail = source_deque.tail;
        let drain_tail = source_deque.head;
        let drain_head = self.after_tail;
        let orig_head = self.after_head;
        let tail_len = count(orig_tail, drain_tail, source_deque.cap());
        let head_len = count(drain_head, orig_head, source_deque.cap());
        // Restore the original head value
        source_deque.head = orig_head;
        // Close the gap left by the drained range by shifting whichever
        // surviving block (front or back) is shorter.
        match (tail_len, head_len) {
            (0, 0) => {
                source_deque.head = 0;
                source_deque.tail = 0;
            }
            (0, _) => {
                source_deque.tail = drain_head;
            }
            (_, 0) => {
                source_deque.head = drain_tail;
            }
            _ => {
                unsafe {
                    if tail_len <= head_len {
                        source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
                        source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
                    } else {
                        source_deque.head = source_deque.wrap_add(drain_tail, head_len);
                        source_deque.wrap_copy(drain_tail, drain_head, head_len);
                    }
                }
            }
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: 'a> Iterator for Drain<'a, T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        // The inner `Iter` yields each slot at most once, so moving the
        // value out with `ptr::read` cannot duplicate ownership; `Drop`
        // later treats the whole drained range as vacant.
        self.iter.next().map(|elt| unsafe { ptr::read(elt) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        // Same ownership argument as `next`, from the back end.
        self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
    }
}
// The inner `Iter`'s `size_hint` is exact, so this impl is sound.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: PartialEq> PartialEq for VecDeque<A> {
    fn eq(&self, other: &VecDeque<A>) -> bool {
        if self.len() != other.len() {
            return false;
        }
        // Each deque exposes its elements as (up to) two slices. If the
        // split points line up we can compare slice-to-slice; otherwise
        // re-split at the two break points and compare three sections.
        let (sa, sb) = self.as_slices();
        let (oa, ob) = other.as_slices();
        if sa.len() == oa.len() {
            sa == oa && sb == ob
        } else if sa.len() < oa.len() {
            // Always divisible in three sections, for example:
            // self:  [a b c|d e f]
            // other: [0 1 2 3|4 5]
            // front = 3, mid = 1,
            // [a b c] == [0 1 2] && [d] == [3] && [e f] == [4 5]
            let front = sa.len();
            let mid = oa.len() - front;
            let (oa_front, oa_mid) = oa.split_at(front);
            let (sb_mid, sb_back) = sb.split_at(mid);
            debug_assert_eq!(sa.len(), oa_front.len());
            debug_assert_eq!(sb_mid.len(), oa_mid.len());
            debug_assert_eq!(sb_back.len(), ob.len());
            sa == oa_front && sb_mid == oa_mid && sb_back == ob
        } else {
            // Mirror image of the case above with the roles swapped.
            let front = oa.len();
            let mid = sa.len() - front;
            let (sa_front, sa_mid) = sa.split_at(front);
            let (ob_mid, ob_back) = ob.split_at(mid);
            debug_assert_eq!(sa_front.len(), oa.len());
            debug_assert_eq!(sa_mid.len(), ob_mid.len());
            debug_assert_eq!(sb.len(), ob_back.len());
            sa_front == oa && sa_mid == ob_mid && sb == ob_back
        }
    }
}
// `PartialEq` above is reflexive/symmetric/transitive when `A: Eq`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Eq> Eq for VecDeque<A> {}
// Deques compare lexicographically, front to back, by delegating to the
// iterator comparison adaptors.
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: PartialOrd> PartialOrd for VecDeque<A> {
    fn partial_cmp(&self, other: &VecDeque<A>) -> Option<Ordering> {
        self.iter().partial_cmp(other.iter())
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Ord> Ord for VecDeque<A> {
    #[inline]
    fn cmp(&self, other: &VecDeque<A>) -> Ordering {
        self.iter().cmp(other.iter())
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Hash> Hash for VecDeque<A> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the length first so deques that are prefixes of each other
        // hash differently, then hash the elements front to back via the
        // two contiguous slices.
        self.len().hash(state);
        let (a, b) = self.as_slices();
        Hash::hash_slice(a, state);
        Hash::hash_slice(b, state);
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> Index<usize> for VecDeque<A> {
    type Output = A;

    /// Panics with "Out of bounds access" when `index >= len`.
    #[inline]
    fn index(&self, index: usize) -> &A {
        match self.get(index) {
            Some(elem) => elem,
            None => panic!("Out of bounds access"),
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> IndexMut<usize> for VecDeque<A> {
    /// Panics with "Out of bounds access" when `index >= len`.
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut A {
        match self.get_mut(index) {
            Some(elem) => elem,
            None => panic!("Out of bounds access"),
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> FromIterator<A> for VecDeque<A> {
    fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> VecDeque<A> {
        let it = iter.into_iter();
        // Pre-size the buffer from the iterator's lower size bound to
        // cut down on reallocations while filling.
        let (lower, _) = it.size_hint();
        let mut deque = VecDeque::with_capacity(lower);
        deque.extend(it);
        deque
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for VecDeque<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Consumes the list into a front-to-back iterator yielding elements by
    /// value.
    fn into_iter(self) -> IntoIter<T> {
        IntoIter { inner: self }
    }
}
// By-reference iteration delegates to the borrowing iterators above, so
// `for x in &deque` and `for x in &mut deque` work as expected.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a VecDeque<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a mut VecDeque<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;

    fn into_iter(mut self) -> IterMut<'a, T> {
        self.iter_mut()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> Extend<A> for VecDeque<A> {
    fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
        // Append each yielded element at the back, preserving the
        // iteration order.
        for item in iter {
            self.push_back(item);
        }
    }
}
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Copy> Extend<&'a T> for VecDeque<T> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        // `T: Copy`, so destructuring each borrowed item is a cheap copy;
        // push the copies onto the back in order.
        for &item in iter {
            self.push_back(item);
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for VecDeque<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Renders like a list (`[a, b, c]`), iterating front to back.
        f.debug_list().entries(self).finish()
    }
}
#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
impl<T> From<Vec<T>> for VecDeque<T> {
    fn from(mut other: Vec<T>) -> Self {
        unsafe {
            // Take over the `Vec`'s allocation wholesale; no elements are
            // copied unless the buffer has to be regrown below.
            let other_buf = other.as_mut_ptr();
            let mut buf = RawVec::from_raw_parts(other_buf, other.capacity());
            let len = other.len();
            // `buf` now owns the allocation; forget `other` so it is not
            // freed twice.
            mem::forget(other);
            // We need to extend the buf if it's not a power of two, too small
            // or doesn't have at least one free space
            if !buf.cap().is_power_of_two()
                || (buf.cap() < (MINIMUM_CAPACITY + 1))
                || (buf.cap() == len)
            {
                let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
                buf.reserve_exact(len, cap - len);
            }
            // Elements already sit contiguously at the front of the
            // buffer, so tail = 0 and head = len describes them exactly.
            VecDeque {
                tail: 0,
                head: len,
                buf: buf
            }
        }
    }
}
#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
impl<T> From<VecDeque<T>> for Vec<T> {
    fn from(other: VecDeque<T>) -> Self {
        unsafe {
            let buf = other.buf.ptr();
            let len = other.len();
            let tail = other.tail;
            let head = other.head;
            let cap = other.cap();
            // Need to move the ring to the front of the buffer, as vec will expect this.
            if other.is_contiguous() {
                // Single run: slide it down to index 0 (`ptr::copy`
                // tolerates the overlap).
                ptr::copy(buf.offset(tail as isize), buf, len);
            } else {
                if (tail - head) >= cmp::min((cap - tail), head) {
                    // There is enough free space in the centre for the shortest block so we can
                    // do this in at most three copy moves.
                    if (cap - tail) > head {
                        // right hand block is the long one; move that enough for the left
                        ptr::copy(
                            buf.offset(tail as isize),
                            buf.offset((tail - head) as isize),
                            cap - tail);
                        // copy left in the end
                        ptr::copy(buf, buf.offset((cap - head) as isize), head);
                        // shift the new thing to the start
                        ptr::copy(buf.offset((tail-head) as isize), buf, len);
                    } else {
                        // left hand block is the long one, we can do it in two!
                        ptr::copy(buf, buf.offset((cap-tail) as isize), head);
                        ptr::copy(buf.offset(tail as isize), buf, cap-tail);
                    }
                } else {
                    // Need to use N swaps to move the ring
                    // We can use the space at the end of the ring as a temp store
                    let mut left_edge: usize = 0;
                    let mut right_edge: usize = tail;
                    // The general problem looks like this
                    // GHIJKLM...ABCDEF - before any swaps
                    // ABCDEFM...GHIJKL - after 1 pass of swaps
                    // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store
                    //                  - then restart the algorithm with a new (smaller) store
                    // Sometimes the temp store is reached when the right edge is at the end
                    // of the buffer - this means we've hit the right order with fewer swaps!
                    // E.g
                    // EF..ABCD
                    // ABCDEF.. - after four only swaps we've finished
                    while left_edge < len && right_edge != cap {
                        let mut right_offset = 0;
                        for i in left_edge..right_edge {
                            right_offset = (i - left_edge) % (cap - right_edge);
                            let src: isize = (right_edge + right_offset) as isize;
                            ptr::swap(buf.offset(i as isize), buf.offset(src));
                        }
                        let n_ops = right_edge - left_edge;
                        left_edge += n_ops;
                        right_edge += right_offset + 1;
                    }
                }
            }
            // The allocation now belongs to `out`; forget `other` so its
            // destructor does not free it (double free) or drop elements.
            let out = Vec::from_raw_parts(buf, len, cap);
            mem::forget(other);
            out
        }
    }
}
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::option::Option::Some;
use test;
use super::VecDeque;
#[bench]
fn bench_push_back_100(b: &mut test::Bencher) {
let mut deq = VecDeque::with_capacity(101);
b.iter(|| {
for i in 0..100 {
deq.push_back(i);
}
deq.head = 0;
deq.tail = 0;
})
}
#[bench]
fn bench_push_front_100(b: &mut test::Bencher) {
let mut deq = VecDeque::with_capacity(101);
b.iter(|| {
for i in 0..100 {
deq.push_front(i);
}
deq.head = 0;
deq.tail = 0;
})
}
#[bench]
fn bench_pop_back_100(b: &mut test::Bencher) {
let mut deq = VecDeque::<i32>::with_capacity(101);
b.iter(|| {
deq.head = 100;
deq.tail = 0;
while !deq.is_empty() {
test::black_box(deq.pop_back());
}
})
}
#[bench]
fn bench_pop_front_100(b: &mut test::Bencher) {
let mut deq = VecDeque::<i32>::with_capacity(101);
b.iter(|| {
deq.head = 100;
deq.tail = 0;
while !deq.is_empty() {
test::black_box(deq.pop_front());
}
})
}
#[test]
fn test_swap_front_back_remove() {
fn test(back: bool) {
// This test checks that every single combination of tail position and length is tested.
// Capacity 15 should be large enough to cover every case.
let mut tester = VecDeque::with_capacity(15);
let usable_cap = tester.capacity();
let final_len = usable_cap / 2;
for len in 0..final_len {
let expected = if back {
(0..len).collect()
} else {
(0..len).rev().collect()
};
for tail_pos in 0..usable_cap {
tester.tail = tail_pos;
tester.head = tail_pos;
if back {
for i in 0..len * 2 {
tester.push_front(i);
}
for i in 0..len {
assert_eq!(tester.swap_remove_back(i), Some(len * 2 - 1 - i));
}
} else {
for i in 0..len * 2 {
tester.push_back(i);
}
for i in 0..len {
let idx = tester.len() - 1 - i;
assert_eq!(tester.swap_remove_front(idx), Some(len * 2 - 1 - i));
}
}
assert!(tester.tail < tester.cap());
assert!(tester.head < tester.cap());
assert_eq!(tester, expected);
}
}
}
test(true);
test(false);
}
#[test]
fn test_insert() {
// This test checks that every single combination of tail position, length, and
// insertion position is tested. Capacity 15 should be large enough to cover every case.
let mut tester = VecDeque::with_capacity(15);
// can't guarantee we got 15, so have to get what we got.
// 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
// this test isn't covering what it wants to
let cap = tester.capacity();
// len is the length *after* insertion
for len in 1..cap {
// 0, 1, 2, .., len - 1
let expected = (0..).take(len).collect();
for tail_pos in 0..cap {
for to_insert in 0..len {
tester.tail = tail_pos;
tester.head = tail_pos;
for i in 0..len {
if i != to_insert {
tester.push_back(i);
}
}
tester.insert(to_insert, to_insert);
assert!(tester.tail < tester.cap());
assert!(tester.head < tester.cap());
assert_eq!(tester, expected);
}
}
}
}
#[test]
fn test_remove() {
// This test checks that every single combination of tail position, length, and
// removal position is tested. Capacity 15 should be large enough to cover every case.
let mut tester = VecDeque::with_capacity(15);
// can't guarantee we got 15, so have to get what we got.
// 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
// this test isn't covering what it wants to
let cap = tester.capacity();
// len is the length *after* removal
for len in 0..cap - 1 {
// 0, 1, 2, .., len - 1
let expected = (0..).take(len).collect();
for tail_pos in 0..cap {
for to_remove in 0..len + 1 {
tester.tail = tail_pos;
tester.head = tail_pos;
for i in 0..len {
if i == to_remove {
tester.push_back(1234);
}
tester.push_back(i);
}
if to_remove == len {
tester.push_back(1234);
}
tester.remove(to_remove);
assert!(tester.tail < tester.cap());
assert!(tester.head < tester.cap());
assert_eq!(tester, expected);
}
}
}
}
#[test]
fn test_drain() {
let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
let cap = tester.capacity();
for len in 0..cap + 1 {
for tail in 0..cap + 1 {
for drain_start in 0..len + 1 {
for drain_end in drain_start..len + 1 {
tester.tail = tail;
tester.head = tail;
for i in 0..len {
tester.push_back(i);
}
// Check that we drain the correct values
let drained: VecDeque<_> = tester.drain(drain_start..drain_end).collect();
let drained_expected: VecDeque<_> = (drain_start..drain_end).collect();
assert_eq!(drained, drained_expected);
// We shouldn't have changed the capacity or made the
// head or tail out of bounds
assert_eq!(tester.capacity(), cap);
assert!(tester.tail < tester.cap());
assert!(tester.head < tester.cap());
// We should see the correct values in the VecDeque
let expected: VecDeque<_> = (0..drain_start)
.chain(drain_end..len)
.collect();
assert_eq!(expected, tester);
}
}
}
}
}
#[test]
fn test_shrink_to_fit() {
// This test checks that every single combination of head and tail position,
// is tested. Capacity 15 should be large enough to cover every case.
let mut tester = VecDeque::with_capacity(15);
// can't guarantee we got 15, so have to get what we got.
// 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
// this test isn't covering what it wants to
let cap = tester.capacity();
tester.reserve(63);
let max_cap = tester.capacity();
for len in 0..cap + 1 {
// 0, 1, 2, .., len - 1
let expected = (0..).take(len).collect();
for tail_pos in 0..max_cap + 1 {
tester.tail = tail_pos;
tester.head = tail_pos;
tester.reserve(63);
for i in 0..len {
tester.push_back(i);
}
tester.shrink_to_fit();
assert!(tester.capacity() <= cap);
assert!(tester.tail < tester.cap());
assert!(tester.head < tester.cap());
assert_eq!(tester, expected);
}
}
}
#[test]
fn test_split_off() {
// This test checks that every single combination of tail position, length, and
// split position is tested. Capacity 15 should be large enough to cover every case.
let mut tester = VecDeque::with_capacity(15);
// can't guarantee we got 15, so have to get what we got.
// 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
// this test isn't covering what it wants to
let cap = tester.capacity();
// len is the length *before* splitting
for len in 0..cap {
// index to split at
for at in 0..len + 1 {
// 0, 1, 2, .., at - 1 (may be empty)
let expected_self = (0..).take(at).collect();
// at, at + 1, .., len - 1 (may be empty)
let expected_other = (at..).take(len - at).collect();
for tail_pos in 0..cap {
tester.tail = tail_pos;
tester.head = tail_pos;
for i in 0..len {
tester.push_back(i);
}
let result = tester.split_off(at);
assert!(tester.tail < tester.cap());
assert!(tester.head < tester.cap());
assert!(result.tail < result.cap());
assert!(result.head < result.cap());
assert_eq!(tester, expected_self);
assert_eq!(result, expected_other);
}
}
}
}
#[test]
fn test_from_vec() {
    use super::super::vec::Vec;
    // Converting a Vec of any (cap, len) combination into a VecDeque must
    // produce a power-of-two ring capacity and preserve length and order.
    for cap in 0..35 {
        for len in 0..cap + 1 {
            let mut vec = Vec::with_capacity(cap);
            vec.extend(0..len);
            let vd = VecDeque::from(vec.clone());
            // VecDeque's ring buffer always rounds capacity up to a power of two.
            assert!(vd.cap().is_power_of_two());
            assert_eq!(vd.len(), vec.len());
            // Element order must be identical to the source Vec.
            assert!(vd.into_iter().eq(vec));
        }
    }
}
#[test]
fn test_vec_from_vecdeque() {
    use super::super::vec::Vec;
    // Builds a VecDeque whose ring start is shifted by `offset` (via push/pop
    // churn), fills it with `len` elements, and checks Vec::from preserves
    // length and order regardless of where the data wraps in the buffer.
    fn create_vec_and_test_convert(cap: usize, offset: usize, len: usize) {
        let mut vd = VecDeque::with_capacity(cap);
        for _ in 0..offset {
            vd.push_back(0);
            vd.pop_front();
        }
        vd.extend(0..len);
        let vec: Vec<_> = Vec::from(vd.clone());
        assert_eq!(vec.len(), vd.len());
        assert!(vec.into_iter().eq(vd));
    }
    for cap_pwr in 0..7 {
        // Make capacity as a (2^x)-1, so that the ring size is 2^x
        let cap = (2i32.pow(cap_pwr) - 1) as usize;
        // In these cases there is enough free space to solve it with copies
        for len in 0..((cap+1)/2) {
            // Test contiguous cases
            for offset in 0..(cap-len) {
                create_vec_and_test_convert(cap, offset, len)
            }
            // Test cases where block at end of buffer is bigger than block at start
            for offset in (cap-len)..(cap-(len/2)) {
                create_vec_and_test_convert(cap, offset, len)
            }
            // Test cases where block at start of buffer is bigger than block at end
            for offset in (cap-(len/2))..cap {
                create_vec_and_test_convert(cap, offset, len)
            }
        }
        // Now there's not (necessarily) space to straighten the ring with simple copies,
        // the ring will use swapping when:
        // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len))
        // right block size > free space && left block size > free space
        for len in ((cap+1)/2)..cap {
            // Test contiguous cases
            for offset in 0..(cap-len) {
                create_vec_and_test_convert(cap, offset, len)
            }
            // Test cases where block at end of buffer is bigger than block at start
            for offset in (cap-len)..(cap-(len/2)) {
                create_vec_and_test_convert(cap, offset, len)
            }
            // Test cases where block at start of buffer is bigger than block at end
            for offset in (cap-(len/2))..cap {
                create_vec_and_test_convert(cap, offset, len)
            }
        }
    }
}
}
| 33.197314 | 100 | 0.458698 |
// This file is Copyright its original authors, visible in version control
// history.
//
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// You may not use this file except in accordance with one or both of these
// licenses.
// Writes a single TLV record to $stream: BigSize type, BigSize length, then the value.
// The fourth argument selects how the field is treated (required/option/vec_type/default).
macro_rules! encode_tlv {
	($stream: expr, $type: expr, $field: expr, (default_value, $default: expr)) => {
		// Default-valued fields are always written; the default only matters when reading.
		encode_tlv!($stream, $type, $field, required)
	};
	($stream: expr, $type: expr, $field: expr, required) => {
		BigSize($type).write($stream)?;
		BigSize($field.serialized_length() as u64).write($stream)?;
		$field.write($stream)?;
	};
	($stream: expr, $type: expr, $field: expr, vec_type) => {
		// Vecs are wrapped in VecWriteWrapper so their TLV encoding matches the reader side.
		encode_tlv!($stream, $type, ::util::ser::VecWriteWrapper(&$field), required);
	};
	($stream: expr, $optional_type: expr, $optional_field: expr, option) => {
		// Optional fields are simply omitted from the stream when None.
		if let Some(ref field) = $optional_field {
			BigSize($optional_type).write($stream)?;
			BigSize(field.serialized_length() as u64).write($stream)?;
			field.write($stream)?;
		}
	};
}
// Writes an entire TLV stream (a series of encode_tlv! records) to $stream.
// Callers must list the (type, field, fieldty) tuples in increasing type order.
macro_rules! encode_tlv_stream {
	($stream: expr, {$(($type: expr, $field: expr, $fieldty: tt)),* $(,)*}) => { {
		#[allow(unused_imports)]
		use {
			ln::msgs::DecodeError,
			util::ser,
			util::ser::BigSize,
		};
		$(
			encode_tlv!($stream, $type, $field, $fieldty);
		)*
		// In debug builds, verify at runtime that the caller listed the TLV
		// types in monotonically increasing order, as the format requires.
		#[allow(unused_mut, unused_variables, unused_assignments)]
		#[cfg(debug_assertions)]
		{
			let mut last_seen: Option<u64> = None;
			$(
				if let Some(t) = last_seen {
					debug_assert!(t <= $type);
				}
				last_seen = Some($type);
			)*
		}
	} }
}
// Accumulates into $len (a LengthCalculatingWriter) the serialized size one TLV
// record will occupy: the BigSize type, the BigSize length, and the value bytes.
// Mirrors the write paths in encode_tlv! exactly so the computed length matches.
macro_rules! get_varint_length_prefixed_tlv_length {
	($len: expr, $type: expr, $field: expr, (default_value, $default: expr)) => {
		get_varint_length_prefixed_tlv_length!($len, $type, $field, required)
	};
	($len: expr, $type: expr, $field: expr, required) => {
		BigSize($type).write(&mut $len).expect("No in-memory data may fail to serialize");
		let field_len = $field.serialized_length();
		BigSize(field_len as u64).write(&mut $len).expect("No in-memory data may fail to serialize");
		$len.0 += field_len;
	};
	($len: expr, $type: expr, $field: expr, vec_type) => {
		get_varint_length_prefixed_tlv_length!($len, $type, ::util::ser::VecWriteWrapper(&$field), required);
	};
	($len: expr, $optional_type: expr, $optional_field: expr, option) => {
		// None fields are omitted on the wire, so they contribute no length.
		if let Some(ref field) = $optional_field {
			BigSize($optional_type).write(&mut $len).expect("No in-memory data may fail to serialize");
			let field_len = field.serialized_length();
			BigSize(field_len as u64).write(&mut $len).expect("No in-memory data may fail to serialize");
			$len.0 += field_len;
		}
	};
}
// Writes a TLV stream prefixed with its total length as a BigSize varint:
// first computes the stream length with a dry-run LengthCalculatingWriter,
// then writes the prefix followed by the actual TLV records.
macro_rules! encode_varint_length_prefixed_tlv {
	($stream: expr, {$(($type: expr, $field: expr, $fieldty: tt)),*}) => { {
		use util::ser::BigSize;
		let len = {
			#[allow(unused_mut)]
			let mut len = ::util::ser::LengthCalculatingWriter(0);
			$(
				get_varint_length_prefixed_tlv_length!(len, $type, $field, $fieldty);
			)*
			len.0
		};
		BigSize(len as u64).write($stream)?;
		encode_tlv_stream!($stream, { $(($type, $field, $fieldty)),* });
	} }
}
// Invoked by decode_tlv_stream! for each known field every time a TLV type ($typ)
// is read from the stream. Because types must arrive in increasing order, if the
// stream has moved past where $type should have appeared without us seeing it,
// a required field is missing: substitute the default where one exists, or fail.
macro_rules! check_tlv_order {
	($last_seen_type: expr, $typ: expr, $type: expr, $field: ident, (default_value, $default: expr)) => {{
		#[allow(unused_comparisons)] // Note that $type may be 0 making the second comparison always true
		let invalid_order = ($last_seen_type.is_none() || $last_seen_type.unwrap() < $type) && $typ.0 > $type;
		if invalid_order {
			// Field was skipped in the stream; fall back to its declared default.
			$field = $default;
		}
	}};
	($last_seen_type: expr, $typ: expr, $type: expr, $field: ident, required) => {{
		#[allow(unused_comparisons)] // Note that $type may be 0 making the second comparison always true
		let invalid_order = ($last_seen_type.is_none() || $last_seen_type.unwrap() < $type) && $typ.0 > $type;
		if invalid_order {
			// A required field with no default cannot be skipped.
			return Err(DecodeError::InvalidValue);
		}
	}};
	($last_seen_type: expr, $typ: expr, $type: expr, $field: ident, option) => {{
		// no-op
	}};
	($last_seen_type: expr, $typ: expr, $type: expr, $field: ident, vec_type) => {{
		// no-op
	}};
	($last_seen_type: expr, $typ: expr, $type: expr, $field: ident, ignorable) => {{
		// no-op
	}};
}
// Invoked by decode_tlv_stream! once per known field after the whole stream has
// been consumed: if a required type was never seen, substitute the default where
// one exists, or fail the decode. Optional/vec/ignorable fields may be absent.
macro_rules! check_missing_tlv {
	($last_seen_type: expr, $type: expr, $field: ident, (default_value, $default: expr)) => {{
		#[allow(unused_comparisons)] // Note that $type may be 0 making the second comparison always true
		let missing_req_type = $last_seen_type.is_none() || $last_seen_type.unwrap() < $type;
		if missing_req_type {
			$field = $default;
		}
	}};
	($last_seen_type: expr, $type: expr, $field: ident, required) => {{
		#[allow(unused_comparisons)] // Note that $type may be 0 making the second comparison always true
		let missing_req_type = $last_seen_type.is_none() || $last_seen_type.unwrap() < $type;
		if missing_req_type {
			return Err(DecodeError::InvalidValue);
		}
	}};
	($last_seen_type: expr, $type: expr, $field: ident, vec_type) => {{
		// no-op
	}};
	($last_seen_type: expr, $type: expr, $field: ident, option) => {{
		// no-op
	}};
	($last_seen_type: expr, $type: expr, $field: ident, ignorable) => {{
		// no-op
	}};
}
// Reads a single TLV value from $reader into $field, with the read strategy
// selected by the field-type marker (required/option/vec_type/ignorable).
macro_rules! decode_tlv {
	($reader: expr, $field: ident, (default_value, $default: expr)) => {{
		decode_tlv!($reader, $field, required)
	}};
	($reader: expr, $field: ident, required) => {{
		$field = ser::Readable::read(&mut $reader)?;
	}};
	($reader: expr, $field: ident, vec_type) => {{
		// Vecs are read through VecReadWrapper, then unwrapped into Some(vec).
		let f: ::util::ser::VecReadWrapper<_> = ser::Readable::read(&mut $reader)?;
		$field = Some(f.0);
	}};
	($reader: expr, $field: ident, option) => {{
		// The record's presence in the stream is what makes the Option Some.
		$field = Some(ser::Readable::read(&mut $reader)?);
	}};
	($reader: expr, $field: ident, ignorable) => {{
		// MaybeReadable allows unknown odd variants to be read as None.
		$field = ser::MaybeReadable::read(&mut $reader)?;
	}};
}
// Reads a full TLV stream from $stream until EOF, enforcing the TLV rules:
// types strictly increasing, required fields present, unknown even types fatal
// (UnknownRequiredFeature) while unknown odd types are skipped ("it's OK to be odd").
macro_rules! decode_tlv_stream {
	($stream: expr, {$(($type: expr, $field: ident, $fieldty: tt)),* $(,)*}) => { {
		use ln::msgs::DecodeError;
		let mut last_seen_type: Option<u64> = None;
		let mut stream_ref = $stream;
		'tlv_read: loop {
			use util::ser;
			// First decode the type of this TLV:
			let typ: ser::BigSize = {
				// We track whether any bytes were read during the consensus_decode call to
				// determine whether we should break or return ShortRead if we get an
				// UnexpectedEof. This should in every case be largely cosmetic, but its nice to
				// pass the TLV test vectors exactly, which requre this distinction.
				let mut tracking_reader = ser::ReadTrackingReader::new(&mut stream_ref);
				match ser::Readable::read(&mut tracking_reader) {
					Err(DecodeError::ShortRead) => {
						if !tracking_reader.have_read {
							// Clean EOF at a record boundary: the stream is simply done.
							break 'tlv_read;
						} else {
							// EOF mid-record is a genuine truncation.
							return Err(DecodeError::ShortRead);
						}
					},
					Err(e) => return Err(e),
					Ok(t) => t,
				}
			};
			// Types must be unique and monotonically increasing:
			match last_seen_type {
				Some(t) if typ.0 <= t => {
					return Err(DecodeError::InvalidValue);
				},
				_ => {},
			}
			// As we read types, make sure we hit every required type:
			$({
				check_tlv_order!(last_seen_type, typ, $type, $field, $fieldty);
			})*
			last_seen_type = Some(typ.0);
			// Finally, read the length and value itself:
			let length: ser::BigSize = ser::Readable::read(&mut stream_ref)?;
			// FixedLengthReader caps the value read at exactly `length` bytes.
			let mut s = ser::FixedLengthReader::new(&mut stream_ref, length.0);
			match typ.0 {
				$($type => {
					decode_tlv!(s, $field, $fieldty);
					if s.bytes_remain() {
						// The value parsed but didn't consume its declared length.
						s.eat_remaining()?; // Return ShortRead if there's actually not enough bytes
						return Err(DecodeError::InvalidValue);
					}
				},)*
				x if x % 2 == 0 => {
					// Unknown even types are required-to-understand per the TLV spec.
					return Err(DecodeError::UnknownRequiredFeature);
				},
				_ => {},
			}
			// Skip over any unknown odd-typed record's value bytes.
			s.eat_remaining()?;
		}
		// Make sure we got to each required type after we've read every TLV:
		$({
			check_missing_tlv!(last_seen_type, $type, $field, $fieldty);
		})*
	} }
}
// Implements Writeable/Readable for a message struct serialized as its plain
// fields in declaration order, followed by a TLV stream for the extension fields.
macro_rules! impl_writeable_msg {
	($st:ident, {$($field:ident),* $(,)*}, {$(($type: expr, $tlvfield: ident, $fieldty: tt)),* $(,)*}) => {
		impl ::util::ser::Writeable for $st {
			fn write<W: ::util::ser::Writer>(&self, w: &mut W) -> Result<(), $crate::io::Error> {
				// Fixed-position fields first, then the TLV suffix.
				$( self.$field.write(w)?; )*
				encode_tlv_stream!(w, {$(($type, self.$tlvfield, $fieldty)),*});
				Ok(())
			}
		}
		impl ::util::ser::Readable for $st {
			fn read<R: $crate::io::Read>(r: &mut R) -> Result<Self, ::ln::msgs::DecodeError> {
				$(let $field = ::util::ser::Readable::read(r)?;)*
				// Declare locals for the TLV fields, then fill them from the stream.
				$(init_tlv_field_var!($tlvfield, $fieldty);)*
				decode_tlv_stream!(r, {$(($type, $tlvfield, $fieldty)),*});
				Ok(Self {
					$($field),*,
					$($tlvfield),*
				})
			}
		}
	}
}
// Implements Writeable/Readable for a struct serialized as its fields written
// back-to-back in declaration order, with no TLV framing or length prefixes.
macro_rules! impl_writeable {
	($st:ident, {$($field:ident),*}) => {
		impl ::util::ser::Writeable for $st {
			fn write<W: ::util::ser::Writer>(&self, w: &mut W) -> Result<(), $crate::io::Error> {
				$( self.$field.write(w)?; )*
				Ok(())
			}
			#[inline]
			fn serialized_length(&self) -> usize {
				// Total size is simply the sum of each field's serialized size.
				let mut len_calc = 0;
				$( len_calc += self.$field.serialized_length(); )*
				return len_calc;
			}
		}
		impl ::util::ser::Readable for $st {
			fn read<R: $crate::io::Read>(r: &mut R) -> Result<Self, ::ln::msgs::DecodeError> {
				Ok(Self {
					$($field: ::util::ser::Readable::read(r)?),*
				})
			}
		}
	}
}
/// Write out two bytes to indicate the version of an object.
/// $this_version represents a unique version of a type. Incremented whenever the type's
/// serialization format has changed or has a new interpretation. Used by a type's
/// reader to determine how to interpret fields or if it can understand a serialized
/// object.
/// $min_version_that_can_read_this is the minimum reader version which can understand this
/// serialized object. Previous versions will simply err with a
/// DecodeError::UnknownVersion.
///
/// Updates to either $this_version or $min_version_that_can_read_this should be included in
/// release notes.
///
/// Both version fields can be specific to this type of object.
macro_rules! write_ver_prefix {
	($stream: expr, $this_version: expr, $min_version_that_can_read_this: expr) => {
		// Each version number is written as exactly one byte.
		$stream.write_all(&[$this_version; 1])?;
		$stream.write_all(&[$min_version_that_can_read_this; 1])?;
	}
}
/// Writes out a suffix to an object which contains potentially backwards-compatible, optional
/// fields which old nodes can happily ignore.
///
/// It is written out in TLV format and, as with all TLV fields, unknown even fields cause a
/// DecodeError::UnknownRequiredFeature error, with unknown odd fields ignored.
///
/// This is the preferred method of adding new fields that old nodes can ignore and still function
/// correctly.
macro_rules! write_tlv_fields {
	($stream: expr, {$(($type: expr, $field: expr, $fieldty: tt)),* $(,)*}) => {
		// Thin wrapper: the suffix is a BigSize-length-prefixed TLV stream.
		encode_varint_length_prefixed_tlv!($stream, {$(($type, $field, $fieldty)),*})
	}
}
/// Reads a prefix added by write_ver_prefix!(), above. Takes the current version of the
/// serialization logic for this object. This is compared against the
/// $min_version_that_can_read_this added by write_ver_prefix!().
macro_rules! read_ver_prefix {
	($stream: expr, $this_version: expr) => { {
		let ver: u8 = Readable::read($stream)?;
		let min_ver: u8 = Readable::read($stream)?;
		// Refuse to read objects written by a version newer than we understand.
		if min_ver > $this_version {
			return Err(DecodeError::UnknownVersion);
		}
		// Evaluates to the writer's version so callers can branch on it.
		ver
	} }
}
/// Reads a suffix added by write_tlv_fields.
macro_rules! read_tlv_fields {
	($stream: expr, {$(($type: expr, $field: ident, $fieldty: tt)),* $(,)*}) => { {
		// The suffix begins with its total length; bound the decode to it.
		let tlv_len: ::util::ser::BigSize = ::util::ser::Readable::read($stream)?;
		let mut rd = ::util::ser::FixedLengthReader::new($stream, tlv_len.0);
		decode_tlv_stream!(&mut rd, {$(($type, $field, $fieldty)),*});
		// Anything the TLV decode didn't consume must still fit in the declared length.
		rd.eat_remaining().map_err(|_| ::ln::msgs::DecodeError::ShortRead)?;
	} }
}
// Converts the local variable filled by decode_tlv_stream! into the final
// struct-field value, unwrapping the intermediate wrappers where they were used.
macro_rules! init_tlv_based_struct_field {
	($field: ident, (default_value, $default: expr)) => {
		$field
	};
	($field: ident, option) => {
		$field
	};
	($field: ident, required) => {
		// OptionDeserWrapper holds Some(value) here; missing-required was already an error.
		$field.0.unwrap()
	};
	($field: ident, vec_type) => {
		$field.unwrap()
	};
}
// Declares the mutable local that decode_tlv_stream! will fill for each field:
// the default value, an OptionDeserWrapper (so a missing required field is
// detectable), Some(empty Vec), or None for plain optionals.
macro_rules! init_tlv_field_var {
	($field: ident, (default_value, $default: expr)) => {
		let mut $field = $default;
	};
	($field: ident, required) => {
		let mut $field = ::util::ser::OptionDeserWrapper(None);
	};
	($field: ident, vec_type) => {
		let mut $field = Some(Vec::new());
	};
	($field: ident, option) => {
		let mut $field = None;
	};
}
/// Implements Readable/Writeable for a struct storing it as a set of TLVs
/// If $fieldty is `required`, then $field is a required field that is not an Option nor a Vec.
/// If $fieldty is `option`, then $field is optional field.
/// if $fieldty is `vec_type`, then $field is a Vec, which needs to have its individual elements
/// serialized.
macro_rules! impl_writeable_tlv_based {
	($st: ident, {$(($type: expr, $field: ident, $fieldty: tt)),* $(,)*}) => {
		impl ::util::ser::Writeable for $st {
			fn write<W: ::util::ser::Writer>(&self, writer: &mut W) -> Result<(), $crate::io::Error> {
				// The whole struct is a single length-prefixed TLV stream.
				write_tlv_fields!(writer, {
					$(($type, self.$field, $fieldty)),*
				});
				Ok(())
			}
			#[inline]
			fn serialized_length(&self) -> usize {
				use util::ser::BigSize;
				// First compute the TLV payload size...
				let len = {
					#[allow(unused_mut)]
					let mut len = ::util::ser::LengthCalculatingWriter(0);
					$(
						get_varint_length_prefixed_tlv_length!(len, $type, self.$field, $fieldty);
					)*
					len.0
				};
				// ...then add the size of the BigSize length prefix itself.
				let mut len_calc = ::util::ser::LengthCalculatingWriter(0);
				BigSize(len as u64).write(&mut len_calc).expect("No in-memory data may fail to serialize");
				len + len_calc.0
			}
		}
		impl ::util::ser::Readable for $st {
			fn read<R: $crate::io::Read>(reader: &mut R) -> Result<Self, ::ln::msgs::DecodeError> {
				// Declare per-field locals, decode the TLV stream into them,
				// then assemble the struct from the (unwrapped) locals.
				$(
					init_tlv_field_var!($field, $fieldty);
				)*
				read_tlv_fields!(reader, {
					$(($type, $field, $fieldty)),*
				});
				Ok(Self {
					$(
						$field: init_tlv_based_struct_field!($field, $fieldty)
					),*
				})
			}
		}
	}
}
// Shared Writeable impl used by both enum macros below: writes a one-byte
// variant id, followed by a TLV stream for struct variants or the raw inner
// value for tuple variants.
macro_rules! _impl_writeable_tlv_based_enum_common {
	($st: ident, $(($variant_id: expr, $variant_name: ident) =>
	 {$(($type: expr, $field: ident, $fieldty: tt)),* $(,)*}
	),* $(,)*;
	$(($tuple_variant_id: expr, $tuple_variant_name: ident)),* $(,)*) => {
		impl ::util::ser::Writeable for $st {
			fn write<W: ::util::ser::Writer>(&self, writer: &mut W) -> Result<(), $crate::io::Error> {
				match self {
					$($st::$variant_name { $(ref $field),* } => {
						let id: u8 = $variant_id;
						id.write(writer)?;
						write_tlv_fields!(writer, {
							$(($type, $field, $fieldty)),*
						});
					}),*
					$($st::$tuple_variant_name (ref field) => {
						let id: u8 = $tuple_variant_id;
						id.write(writer)?;
						field.write(writer)?;
					}),*
				}
				Ok(())
			}
		}
	}
}
/// Implement MaybeReadable and Writeable for an enum, with struct variants stored as TLVs and
/// tuple variants stored directly.
///
/// This is largely identical to `impl_writeable_tlv_based_enum`, except that odd variants will
/// return `Ok(None)` instead of `Err(UnknownRequiredFeature)`. It should generally be preferred
/// when `MaybeReadable` is practical instead of just `Readable` as it provides an upgrade path for
/// new variants to be added which are simply ignored by existing clients.
macro_rules! impl_writeable_tlv_based_enum_upgradable {
	($st: ident, $(($variant_id: expr, $variant_name: ident) =>
	 {$(($type: expr, $field: ident, $fieldty: tt)),* $(,)*}
	),* $(,)*
	$(;
	$(($tuple_variant_id: expr, $tuple_variant_name: ident)),* $(,)*)*) => {
		_impl_writeable_tlv_based_enum_common!($st,
			$(($variant_id, $variant_name) => {$(($type, $field, $fieldty)),*}),*;
			$($(($tuple_variant_id, $tuple_variant_name)),*)*);
		impl ::util::ser::MaybeReadable for $st {
			fn read<R: $crate::io::Read>(reader: &mut R) -> Result<Option<Self>, ::ln::msgs::DecodeError> {
				// The variant id is always the first byte.
				let id: u8 = ::util::ser::Readable::read(reader)?;
				match id {
					$($variant_id => {
						// Because read_tlv_fields creates a labeled loop, we cannot call it twice
						// in the same function body. Instead, we define a closure and call it.
						let f = || {
							$(
								init_tlv_field_var!($field, $fieldty);
							)*
							read_tlv_fields!(reader, {
								$(($type, $field, $fieldty)),*
							});
							Ok(Some($st::$variant_name {
								$(
									$field: init_tlv_based_struct_field!($field, $fieldty)
								),*
							}))
						};
						f()
					}),*
					$($($tuple_variant_id => {
						Ok(Some($st::$tuple_variant_name(Readable::read(reader)?)))
					}),*)*
					// Unknown odd variant ids are ignorable per the upgrade path...
					_ if id % 2 == 1 => Ok(None),
					// ...while unknown even ids are required-to-understand.
					_ => Err(DecodeError::UnknownRequiredFeature),
				}
			}
		}
	}
}
/// Implement Readable and Writeable for an enum, with struct variants stored as TLVs and tuple
/// variants stored directly.
/// The format is, for example
/// impl_writeable_tlv_based_enum!(EnumName,
/// (0, StructVariantA) => {(0, required_variant_field, required), (1, optional_variant_field, option)},
/// (1, StructVariantB) => {(0, variant_field_a, required), (1, variant_field_b, required), (2, variant_vec_field, vec_type)};
/// (2, TupleVariantA), (3, TupleVariantB),
/// );
/// The type is written as a single byte, followed by any variant data.
/// Attempts to read an unknown type byte result in DecodeError::UnknownRequiredFeature.
macro_rules! impl_writeable_tlv_based_enum {
	($st: ident, $(($variant_id: expr, $variant_name: ident) =>
	 {$(($type: expr, $field: ident, $fieldty: tt)),* $(,)*}
	),* $(,)*;
	$(($tuple_variant_id: expr, $tuple_variant_name: ident)),* $(,)*) => {
		_impl_writeable_tlv_based_enum_common!($st,
			$(($variant_id, $variant_name) => {$(($type, $field, $fieldty)),*}),*;
			$(($tuple_variant_id, $tuple_variant_name)),*);
		impl ::util::ser::Readable for $st {
			fn read<R: $crate::io::Read>(reader: &mut R) -> Result<Self, ::ln::msgs::DecodeError> {
				// The variant id is always the first byte.
				let id: u8 = ::util::ser::Readable::read(reader)?;
				match id {
					$($variant_id => {
						// Because read_tlv_fields creates a labeled loop, we cannot call it twice
						// in the same function body. Instead, we define a closure and call it.
						let f = || {
							$(
								init_tlv_field_var!($field, $fieldty);
							)*
							read_tlv_fields!(reader, {
								$(($type, $field, $fieldty)),*
							});
							Ok($st::$variant_name {
								$(
									$field: init_tlv_based_struct_field!($field, $fieldty)
								),*
							})
						};
						f()
					}),*
					$($tuple_variant_id => {
						Ok($st::$tuple_variant_name(Readable::read(reader)?))
					}),*
					_ => {
						// Unlike the upgradable variant, any unknown id is fatal here.
						Err(DecodeError::UnknownRequiredFeature)
					},
				}
			}
		}
	}
}
// Unit tests for the TLV macros, including the BOLT #1 TLV test vectors.
#[cfg(test)]
mod tests {
	use io::{self, Cursor};
	use prelude::*;
	use ln::msgs::DecodeError;
	use util::ser::{Writeable, HighZeroBytesDroppedVarInt, VecWriter};
	use bitcoin::secp256k1::PublicKey;
	// The BOLT TLV test cases don't include any tests which use our "required-value" logic since
	// the encoding layer in the BOLTs has no such concept, though it makes our macros easier to
	// work with so they're baked into the decoder. Thus, we have a few additional tests below
	// Decodes a stream with required types 2 (u64) and 3 (u32) and optional type 4 (u32).
	fn tlv_reader(s: &[u8]) -> Result<(u64, u32, Option<u32>), DecodeError> {
		let mut s = Cursor::new(s);
		let mut a: u64 = 0;
		let mut b: u32 = 0;
		let mut c: Option<u32> = None;
		decode_tlv_stream!(&mut s, {(2, a, required), (3, b, required), (4, c, option)});
		Ok((a, b, c))
	}
	#[test]
	fn tlv_v_short_read() {
		// We only expect a u32 for type 3 (which we are given), but the L says its 8 bytes.
		if let Err(DecodeError::ShortRead) = tlv_reader(&::hex::decode(
				concat!("0100", "0208deadbeef1badbeef", "0308deadbeef")
				).unwrap()[..]) {
		} else { panic!(); }
	}
	#[test]
	fn tlv_types_out_of_order() {
		// Types must be strictly increasing; out-of-order streams are rejected.
		if let Err(DecodeError::InvalidValue) = tlv_reader(&::hex::decode(
				concat!("0100", "0304deadbeef", "0208deadbeef1badbeef")
				).unwrap()[..]) {
		} else { panic!(); }
		// ...even if its some field we don't understand
		if let Err(DecodeError::InvalidValue) = tlv_reader(&::hex::decode(
				concat!("0208deadbeef1badbeef", "0100", "0304deadbeef")
				).unwrap()[..]) {
		} else { panic!(); }
	}
	#[test]
	fn tlv_req_type_missing_or_extra() {
		// It's also bad if they included even fields we don't understand
		if let Err(DecodeError::UnknownRequiredFeature) = tlv_reader(&::hex::decode(
				concat!("0100", "0208deadbeef1badbeef", "0304deadbeef", "0600")
				).unwrap()[..]) {
		} else { panic!(); }
		// ... or if they're missing fields we need
		if let Err(DecodeError::InvalidValue) = tlv_reader(&::hex::decode(
				concat!("0100", "0208deadbeef1badbeef")
				).unwrap()[..]) {
		} else { panic!(); }
		// ... even if that field is even
		if let Err(DecodeError::InvalidValue) = tlv_reader(&::hex::decode(
				concat!("0304deadbeef", "0500")
				).unwrap()[..]) {
		} else { panic!(); }
	}
	#[test]
	fn tlv_simple_good_cases() {
		assert_eq!(tlv_reader(&::hex::decode(
				concat!("0208deadbeef1badbeef", "03041bad1dea")
				).unwrap()[..]).unwrap(),
			(0xdeadbeef1badbeef, 0x1bad1dea, None));
		assert_eq!(tlv_reader(&::hex::decode(
				concat!("0208deadbeef1badbeef", "03041bad1dea", "040401020304")
				).unwrap()[..]).unwrap(),
			(0xdeadbeef1badbeef, 0x1bad1dea, Some(0x01020304)));
	}
	// BOLT TLV test cases
	// Decoder matching the BOLT #1 "n1" namespace (types 1, 2, 3 and 254).
	fn tlv_reader_n1(s: &[u8]) -> Result<(Option<HighZeroBytesDroppedVarInt<u64>>, Option<u64>, Option<(PublicKey, u64, u64)>, Option<u16>), DecodeError> {
		let mut s = Cursor::new(s);
		let mut tlv1: Option<HighZeroBytesDroppedVarInt<u64>> = None;
		let mut tlv2: Option<u64> = None;
		let mut tlv3: Option<(PublicKey, u64, u64)> = None;
		let mut tlv4: Option<u16> = None;
		decode_tlv_stream!(&mut s, {(1, tlv1, option), (2, tlv2, option), (3, tlv3, option), (254, tlv4, option)});
		Ok((tlv1, tlv2, tlv3, tlv4))
	}
	#[test]
	fn bolt_tlv_bogus_stream() {
		macro_rules! do_test {
			($stream: expr, $reason: ident) => {
				if let Err(DecodeError::$reason) = tlv_reader_n1(&::hex::decode($stream).unwrap()[..]) {
				} else { panic!(); }
			}
		}
		// TLVs from the BOLT test cases which should not decode as either n1 or n2
		do_test!(concat!("fd01"), ShortRead);
		do_test!(concat!("fd0001", "00"), InvalidValue);
		do_test!(concat!("fd0101"), ShortRead);
		do_test!(concat!("0f", "fd"), ShortRead);
		do_test!(concat!("0f", "fd26"), ShortRead);
		do_test!(concat!("0f", "fd2602"), ShortRead);
		do_test!(concat!("0f", "fd0001", "00"), InvalidValue);
		do_test!(concat!("0f", "fd0201", "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"), ShortRead);
		do_test!(concat!("12", "00"), UnknownRequiredFeature);
		do_test!(concat!("fd0102", "00"), UnknownRequiredFeature);
		do_test!(concat!("fe01000002", "00"), UnknownRequiredFeature);
		do_test!(concat!("ff0100000000000002", "00"), UnknownRequiredFeature);
	}
	#[test]
	fn bolt_tlv_bogus_n1_stream() {
		macro_rules! do_test {
			($stream: expr, $reason: ident) => {
				if let Err(DecodeError::$reason) = tlv_reader_n1(&::hex::decode($stream).unwrap()[..]) {
				} else { panic!(); }
			}
		}
		// TLVs from the BOLT test cases which should not decode as n1
		do_test!(concat!("01", "09", "ffffffffffffffffff"), InvalidValue);
		do_test!(concat!("01", "01", "00"), InvalidValue);
		do_test!(concat!("01", "02", "0001"), InvalidValue);
		do_test!(concat!("01", "03", "000100"), InvalidValue);
		do_test!(concat!("01", "04", "00010000"), InvalidValue);
		do_test!(concat!("01", "05", "0001000000"), InvalidValue);
		do_test!(concat!("01", "06", "000100000000"), InvalidValue);
		do_test!(concat!("01", "07", "00010000000000"), InvalidValue);
		do_test!(concat!("01", "08", "0001000000000000"), InvalidValue);
		do_test!(concat!("02", "07", "01010101010101"), ShortRead);
		do_test!(concat!("02", "09", "010101010101010101"), InvalidValue);
		do_test!(concat!("03", "21", "023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb"), ShortRead);
		do_test!(concat!("03", "29", "023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001"), ShortRead);
		do_test!(concat!("03", "30", "023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb000000000000000100000000000001"), ShortRead);
		do_test!(concat!("03", "31", "043da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002"), InvalidValue);
		do_test!(concat!("03", "32", "023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001000000000000000001"), InvalidValue);
		do_test!(concat!("fd00fe", "00"), ShortRead);
		do_test!(concat!("fd00fe", "01", "01"), ShortRead);
		do_test!(concat!("fd00fe", "03", "010101"), InvalidValue);
		do_test!(concat!("00", "00"), UnknownRequiredFeature);
		do_test!(concat!("02", "08", "0000000000000226", "01", "01", "2a"), InvalidValue);
		do_test!(concat!("02", "08", "0000000000000231", "02", "08", "0000000000000451"), InvalidValue);
		do_test!(concat!("1f", "00", "0f", "01", "2a"), InvalidValue);
		do_test!(concat!("1f", "00", "1f", "01", "2a"), InvalidValue);
		// The last BOLT test modified to not require creating a new decoder for one trivial test.
		do_test!(concat!("ffffffffffffffffff", "00", "01", "00"), InvalidValue);
	}
	#[test]
	fn bolt_tlv_valid_n1_stream() {
		macro_rules! do_test {
			($stream: expr, $tlv1: expr, $tlv2: expr, $tlv3: expr, $tlv4: expr) => {
				if let Ok((tlv1, tlv2, tlv3, tlv4)) = tlv_reader_n1(&::hex::decode($stream).unwrap()[..]) {
					assert_eq!(tlv1.map(|v| v.0), $tlv1);
					assert_eq!(tlv2, $tlv2);
					assert_eq!(tlv3, $tlv3);
					assert_eq!(tlv4, $tlv4);
				} else { panic!(); }
			}
		}
		do_test!(concat!(""), None, None, None, None);
		do_test!(concat!("21", "00"), None, None, None, None);
		do_test!(concat!("fd0201", "00"), None, None, None, None);
		do_test!(concat!("fd00fd", "00"), None, None, None, None);
		do_test!(concat!("fd00ff", "00"), None, None, None, None);
		do_test!(concat!("fe02000001", "00"), None, None, None, None);
		do_test!(concat!("ff0200000000000001", "00"), None, None, None, None);
		do_test!(concat!("01", "00"), Some(0), None, None, None);
		do_test!(concat!("01", "01", "01"), Some(1), None, None, None);
		do_test!(concat!("01", "02", "0100"), Some(256), None, None, None);
		do_test!(concat!("01", "03", "010000"), Some(65536), None, None, None);
		do_test!(concat!("01", "04", "01000000"), Some(16777216), None, None, None);
		do_test!(concat!("01", "05", "0100000000"), Some(4294967296), None, None, None);
		do_test!(concat!("01", "06", "010000000000"), Some(1099511627776), None, None, None);
		do_test!(concat!("01", "07", "01000000000000"), Some(281474976710656), None, None, None);
		do_test!(concat!("01", "08", "0100000000000000"), Some(72057594037927936), None, None, None);
		do_test!(concat!("02", "08", "0000000000000226"), None, Some((0 << 30) | (0 << 5) | (550 << 0)), None, None);
		do_test!(concat!("03", "31", "023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002"),
			None, None, Some((
				PublicKey::from_slice(&::hex::decode("023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb").unwrap()[..]).unwrap(), 1, 2)),
			None);
		do_test!(concat!("fd00fe", "02", "0226"), None, None, None, Some(550));
	}
	// Round-trips a handful of encode cases against known-good hex output.
	fn do_simple_test_tlv_write() -> Result<(), io::Error> {
		let mut stream = VecWriter(Vec::new());
		stream.0.clear();
		encode_varint_length_prefixed_tlv!(&mut stream, {(1, 1u8, required), (42, None::<u64>, option)});
		assert_eq!(stream.0, ::hex::decode("03010101").unwrap());
		stream.0.clear();
		encode_varint_length_prefixed_tlv!(&mut stream, {(1, Some(1u8), option)});
		assert_eq!(stream.0, ::hex::decode("03010101").unwrap());
		stream.0.clear();
		encode_varint_length_prefixed_tlv!(&mut stream, {(4, 0xabcdu16, required), (42, None::<u64>, option)});
		assert_eq!(stream.0, ::hex::decode("040402abcd").unwrap());
		stream.0.clear();
		encode_varint_length_prefixed_tlv!(&mut stream, {(42, None::<u64>, option), (0xff, 0xabcdu16, required)});
		assert_eq!(stream.0, ::hex::decode("06fd00ff02abcd").unwrap());
		stream.0.clear();
		encode_varint_length_prefixed_tlv!(&mut stream, {(0, 1u64, required), (42, None::<u64>, option), (0xff, HighZeroBytesDroppedVarInt(0u64), required)});
		assert_eq!(stream.0, ::hex::decode("0e00080000000000000001fd00ff00").unwrap());
		stream.0.clear();
		encode_varint_length_prefixed_tlv!(&mut stream, {(0, Some(1u64), option), (0xff, HighZeroBytesDroppedVarInt(0u64), required)});
		assert_eq!(stream.0, ::hex::decode("0e00080000000000000001fd00ff00").unwrap());
		Ok(())
	}
	#[test]
	fn simple_test_tlv_write() {
		do_simple_test_tlv_write().unwrap();
	}
}
| 37.696382 | 567 | 0.641841 |
/*
* Strava API v3
*
* The [Swagger Playground](https://developers.strava.com/playground) is the easiest way to familiarize yourself with the Strava API by submitting HTTP requests and observing the responses before you write any client code. It will show what a response will look like with different endpoints depending on the authorization scope you receive from your athletes. To use the Playground, go to https://www.strava.com/settings/api and change your “Authorization Callback Domain” to developers.strava.com. Please note, we only support Swagger 2.0. There is a known issue where you can only select one scope at a time. For more information, please check the section “client code” at https://developers.strava.com/docs.
*
* OpenAPI spec version: 3.0.0
*
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
#[allow(unused_imports)]
use serde_json::Value;
/// A Strava athlete in full ("detail" resource state) representation, as
/// returned by the API. Every field is optional because the server may omit
/// fields depending on resource state and authorization scope.
#[derive(Debug, Serialize, Deserialize)]
pub struct DetailedAthlete {
    /// The unique identifier of the athlete
    #[serde(rename = "id")]
    id: Option<i32>,
    /// Resource state, indicates level of detail. Possible values: 1 -> \"meta\", 2 -> \"summary\", 3 -> \"detail\"
    #[serde(rename = "resource_state")]
    resource_state: Option<i32>,
    /// The athlete's first name.
    #[serde(rename = "firstname")]
    firstname: Option<String>,
    /// The athlete's last name.
    #[serde(rename = "lastname")]
    lastname: Option<String>,
    /// URL to a 62x62 pixel profile picture.
    #[serde(rename = "profile_medium")]
    profile_medium: Option<String>,
    /// URL to a 124x124 pixel profile picture.
    #[serde(rename = "profile")]
    profile: Option<String>,
    /// The athlete's city.
    #[serde(rename = "city")]
    city: Option<String>,
    /// The athlete's state or geographical region.
    #[serde(rename = "state")]
    state: Option<String>,
    /// The athlete's country.
    #[serde(rename = "country")]
    country: Option<String>,
    /// The athlete's sex.
    #[serde(rename = "sex")]
    sex: Option<String>,
    /// Deprecated. Use summit field instead. Whether the athlete has any Summit subscription.
    #[serde(rename = "premium")]
    premium: Option<bool>,
    /// Whether the athlete has any Summit subscription.
    #[serde(rename = "summit")]
    summit: Option<bool>,
    /// The time at which the athlete was created.
    #[serde(rename = "created_at")]
    created_at: Option<String>,
    /// The time at which the athlete was last updated.
    #[serde(rename = "updated_at")]
    updated_at: Option<String>,
    /// The athlete's follower count.
    #[serde(rename = "follower_count")]
    follower_count: Option<i32>,
    /// The athlete's friend count.
    #[serde(rename = "friend_count")]
    friend_count: Option<i32>,
    /// The athlete's preferred unit system.
    #[serde(rename = "measurement_preference")]
    measurement_preference: Option<String>,
    /// The athlete's FTP (Functional Threshold Power).
    #[serde(rename = "ftp")]
    ftp: Option<i32>,
    /// The athlete's weight.
    #[serde(rename = "weight")]
    weight: Option<f32>,
    /// The athlete's clubs.
    #[serde(rename = "clubs")]
    clubs: Option<Vec<::models::SummaryClub>>,
    /// The athlete's bikes.
    #[serde(rename = "bikes")]
    bikes: Option<Vec<::models::SummaryGear>>,
    /// The athlete's shoes.
    #[serde(rename = "shoes")]
    shoes: Option<Vec<::models::SummaryGear>>
}
impl DetailedAthlete {
pub fn new() -> DetailedAthlete {
DetailedAthlete {
id: None,
resource_state: None,
firstname: None,
lastname: None,
profile_medium: None,
profile: None,
city: None,
state: None,
country: None,
sex: None,
premium: None,
summit: None,
created_at: None,
updated_at: None,
follower_count: None,
friend_count: None,
measurement_preference: None,
ftp: None,
weight: None,
clubs: None,
bikes: None,
shoes: None
}
}
pub fn set_id(&mut self, id: i32) {
self.id = Some(id);
}
pub fn with_id(mut self, id: i32) -> DetailedAthlete {
self.id = Some(id);
self
}
pub fn id(&self) -> Option<&i32> {
self.id.as_ref()
}
pub fn reset_id(&mut self) {
self.id = None;
}
pub fn set_resource_state(&mut self, resource_state: i32) {
self.resource_state = Some(resource_state);
}
pub fn with_resource_state(mut self, resource_state: i32) -> DetailedAthlete {
self.resource_state = Some(resource_state);
self
}
pub fn resource_state(&self) -> Option<&i32> {
self.resource_state.as_ref()
}
pub fn reset_resource_state(&mut self) {
self.resource_state = None;
}
pub fn set_firstname(&mut self, firstname: String) {
self.firstname = Some(firstname);
}
pub fn with_firstname(mut self, firstname: String) -> DetailedAthlete {
self.firstname = Some(firstname);
self
}
pub fn firstname(&self) -> Option<&String> {
self.firstname.as_ref()
}
pub fn reset_firstname(&mut self) {
self.firstname = None;
}
pub fn set_lastname(&mut self, lastname: String) {
self.lastname = Some(lastname);
}
pub fn with_lastname(mut self, lastname: String) -> DetailedAthlete {
self.lastname = Some(lastname);
self
}
pub fn lastname(&self) -> Option<&String> {
self.lastname.as_ref()
}
pub fn reset_lastname(&mut self) {
self.lastname = None;
}
pub fn set_profile_medium(&mut self, profile_medium: String) {
self.profile_medium = Some(profile_medium);
}
pub fn with_profile_medium(mut self, profile_medium: String) -> DetailedAthlete {
self.profile_medium = Some(profile_medium);
self
}
pub fn profile_medium(&self) -> Option<&String> {
self.profile_medium.as_ref()
}
pub fn reset_profile_medium(&mut self) {
self.profile_medium = None;
}
pub fn set_profile(&mut self, profile: String) {
self.profile = Some(profile);
}
pub fn with_profile(mut self, profile: String) -> DetailedAthlete {
self.profile = Some(profile);
self
}
pub fn profile(&self) -> Option<&String> {
self.profile.as_ref()
}
pub fn reset_profile(&mut self) {
self.profile = None;
}
pub fn set_city(&mut self, city: String) {
self.city = Some(city);
}
pub fn with_city(mut self, city: String) -> DetailedAthlete {
self.city = Some(city);
self
}
pub fn city(&self) -> Option<&String> {
self.city.as_ref()
}
pub fn reset_city(&mut self) {
self.city = None;
}
pub fn set_state(&mut self, state: String) {
self.state = Some(state);
}
pub fn with_state(mut self, state: String) -> DetailedAthlete {
self.state = Some(state);
self
}
pub fn state(&self) -> Option<&String> {
self.state.as_ref()
}
pub fn reset_state(&mut self) {
self.state = None;
}
pub fn set_country(&mut self, country: String) {
self.country = Some(country);
}
pub fn with_country(mut self, country: String) -> DetailedAthlete {
self.country = Some(country);
self
}
pub fn country(&self) -> Option<&String> {
self.country.as_ref()
}
pub fn reset_country(&mut self) {
self.country = None;
}
pub fn set_sex(&mut self, sex: String) {
self.sex = Some(sex);
}
pub fn with_sex(mut self, sex: String) -> DetailedAthlete {
self.sex = Some(sex);
self
}
pub fn sex(&self) -> Option<&String> {
self.sex.as_ref()
}
pub fn reset_sex(&mut self) {
self.sex = None;
}
pub fn set_premium(&mut self, premium: bool) {
self.premium = Some(premium);
}
pub fn with_premium(mut self, premium: bool) -> DetailedAthlete {
self.premium = Some(premium);
self
}
pub fn premium(&self) -> Option<&bool> {
self.premium.as_ref()
}
pub fn reset_premium(&mut self) {
self.premium = None;
}
pub fn set_summit(&mut self, summit: bool) {
self.summit = Some(summit);
}
pub fn with_summit(mut self, summit: bool) -> DetailedAthlete {
self.summit = Some(summit);
self
}
pub fn summit(&self) -> Option<&bool> {
self.summit.as_ref()
}
pub fn reset_summit(&mut self) {
self.summit = None;
}
pub fn set_created_at(&mut self, created_at: String) {
self.created_at = Some(created_at);
}
pub fn with_created_at(mut self, created_at: String) -> DetailedAthlete {
self.created_at = Some(created_at);
self
}
pub fn created_at(&self) -> Option<&String> {
self.created_at.as_ref()
}
pub fn reset_created_at(&mut self) {
self.created_at = None;
}
pub fn set_updated_at(&mut self, updated_at: String) {
self.updated_at = Some(updated_at);
}
pub fn with_updated_at(mut self, updated_at: String) -> DetailedAthlete {
self.updated_at = Some(updated_at);
self
}
pub fn updated_at(&self) -> Option<&String> {
self.updated_at.as_ref()
}
pub fn reset_updated_at(&mut self) {
self.updated_at = None;
}
pub fn set_follower_count(&mut self, follower_count: i32) {
self.follower_count = Some(follower_count);
}
pub fn with_follower_count(mut self, follower_count: i32) -> DetailedAthlete {
self.follower_count = Some(follower_count);
self
}
pub fn follower_count(&self) -> Option<&i32> {
self.follower_count.as_ref()
}
pub fn reset_follower_count(&mut self) {
self.follower_count = None;
}
pub fn set_friend_count(&mut self, friend_count: i32) {
self.friend_count = Some(friend_count);
}
pub fn with_friend_count(mut self, friend_count: i32) -> DetailedAthlete {
self.friend_count = Some(friend_count);
self
}
pub fn friend_count(&self) -> Option<&i32> {
self.friend_count.as_ref()
}
pub fn reset_friend_count(&mut self) {
self.friend_count = None;
}
pub fn set_measurement_preference(&mut self, measurement_preference: String) {
self.measurement_preference = Some(measurement_preference);
}
pub fn with_measurement_preference(mut self, measurement_preference: String) -> DetailedAthlete {
self.measurement_preference = Some(measurement_preference);
self
}
pub fn measurement_preference(&self) -> Option<&String> {
self.measurement_preference.as_ref()
}
pub fn reset_measurement_preference(&mut self) {
self.measurement_preference = None;
}
pub fn set_ftp(&mut self, ftp: i32) {
self.ftp = Some(ftp);
}
pub fn with_ftp(mut self, ftp: i32) -> DetailedAthlete {
self.ftp = Some(ftp);
self
}
pub fn ftp(&self) -> Option<&i32> {
self.ftp.as_ref()
}
pub fn reset_ftp(&mut self) {
self.ftp = None;
}
pub fn set_weight(&mut self, weight: f32) {
self.weight = Some(weight);
}
pub fn with_weight(mut self, weight: f32) -> DetailedAthlete {
self.weight = Some(weight);
self
}
pub fn weight(&self) -> Option<&f32> {
self.weight.as_ref()
}
pub fn reset_weight(&mut self) {
self.weight = None;
}
pub fn set_clubs(&mut self, clubs: Vec<::models::SummaryClub>) {
self.clubs = Some(clubs);
}
pub fn with_clubs(mut self, clubs: Vec<::models::SummaryClub>) -> DetailedAthlete {
self.clubs = Some(clubs);
self
}
pub fn clubs(&self) -> Option<&Vec<::models::SummaryClub>> {
self.clubs.as_ref()
}
pub fn reset_clubs(&mut self) {
self.clubs = None;
}
pub fn set_bikes(&mut self, bikes: Vec<::models::SummaryGear>) {
self.bikes = Some(bikes);
}
pub fn with_bikes(mut self, bikes: Vec<::models::SummaryGear>) -> DetailedAthlete {
self.bikes = Some(bikes);
self
}
pub fn bikes(&self) -> Option<&Vec<::models::SummaryGear>> {
self.bikes.as_ref()
}
pub fn reset_bikes(&mut self) {
self.bikes = None;
}
pub fn set_shoes(&mut self, shoes: Vec<::models::SummaryGear>) {
self.shoes = Some(shoes);
}
pub fn with_shoes(mut self, shoes: Vec<::models::SummaryGear>) -> DetailedAthlete {
self.shoes = Some(shoes);
self
}
pub fn shoes(&self) -> Option<&Vec<::models::SummaryGear>> {
self.shoes.as_ref()
}
pub fn reset_shoes(&mut self) {
self.shoes = None;
}
}
| 24.407332 | 711 | 0.651786 |
640cf2db212c263992a8d165b5afbebccb3d2da9 | 3,971 | mod apple_base;
mod linux_base;
mod windows_msvc_base;
use crate::host_triple;
use thiserror::Error;
/// The flavor of linker used to produce the final artifact for a target.
#[derive(Debug, Clone, Copy, Eq, Ord, PartialOrd, PartialEq, Hash)]
pub enum LinkerFlavor {
    /// GNU `ld`-style linker.
    Ld,
    /// Apple's `ld64` linker.
    Ld64,
    /// Microsoft's MSVC `link.exe`-style linker.
    Msvc,
}
/// Everything Mun knows about a target.
/// Every field must be specified, there are no default values.
#[derive(PartialEq, Clone, Debug)]
pub struct Target {
    /// Target triple to pass to LLVM, e.g. "x86_64-apple-darwin"
    pub llvm_target: String,
    /// String to use as the `target_endian` `cfg` variable.
    pub target_endian: String,
    /// String to use as the `target_pointer_width` `cfg` variable.
    pub target_pointer_width: String,
    /// Width of c_int type
    pub target_c_int_width: String,
    /// The name of the OS
    pub target_os: String,
    /// The name of the environment
    pub target_env: String,
    /// The name of the vendor
    pub target_vendor: String,
    /// The name of the architecture. For example "x86" or "x86_64"
    pub arch: String,
    /// [Data layout](http://llvm.org/docs/LangRef.html#data-layout) to pass to LLVM.
    pub data_layout: String,
    /// Linker flavor (see [`LinkerFlavor`])
    pub linker_flavor: LinkerFlavor,
    /// Optional settings; see [`TargetOptions`] for the defaults
    pub options: TargetOptions,
}
/// Optional aspects of target specification.
///
/// Unlike [`Target`], every field here has a sensible default provided by the
/// `Default` implementation below.
#[derive(PartialEq, Clone, Debug)]
pub struct TargetOptions {
    /// True if this is a built-in target
    pub is_builtin: bool,
    /// Default CPU to pass to LLVM. Corresponds to `llc -mcpu=$cpu`. Defaults to "generic".
    pub cpu: String,
    /// Default target features to pass to LLVM. These features will *always* be passed, and cannot
    /// be disabled even via `-C`. Corresponds to `llc -mattr=$features`.
    pub features: String,
    /// String to prepend to the name of every dynamic library. Defaults to "lib".
    pub dll_prefix: String,
    /// Whether the target toolchain is like Windows
    pub is_like_windows: bool,
}
impl Default for TargetOptions {
fn default() -> Self {
TargetOptions {
is_builtin: false,
cpu: "generic".to_string(),
features: "".to_string(),
dll_prefix: "lib".to_string(),
is_like_windows: false,
}
}
}
/// Errors that can occur while loading a target specification.
#[derive(Error, Debug)]
pub enum LoadTargetError {
    /// The requested triple is not one of the built-in targets.
    #[error("target not found: {0}")]
    BuiltinTargetNotFound(String),
    /// Any other error reported while constructing the target.
    #[error("{0}")]
    Other(String),
}
/// Result of constructing a [`Target`]; the error is a human-readable message.
pub type TargetResult = Result<Target, String>;
// Declares the set of built-in targets. For each `("triple", module)` pair
// this expands to:
//   * a `mod` declaration for the target's definition module,
//   * the `TARGETS` list of all supported triple strings,
//   * `load_specific`, mapping a triple to its `Target` (marked as builtin),
//   * `get_targets`, yielding the triples that load successfully.
macro_rules! supported_targets {
    ( $(($( $triple:literal, )+ $module:ident ),)+ ) => {
        $ ( mod $ module; ) +
        /// List of supported targets
        const TARGETS: &[&str] = &[$($($triple),+),+];
        fn load_specific(target: &str) -> Result<Target, LoadTargetError> {
            match target {
                $(
                    $($triple)|+ => {
                        let mut t = $module::target()
                            .map_err(LoadTargetError::Other)?;
                        // Anything loaded through this table is, by
                        // definition, a builtin target.
                        t.options.is_builtin = true;
                        log::debug!("got builtin target: {:?}", t);
                        Ok(t)
                    },
                )+
                _ => Err(LoadTargetError::BuiltinTargetNotFound(
                    format!("Unable to find target: {}", target)))
            }
        }
        pub fn get_targets() -> impl Iterator<Item = String> {
            TARGETS.iter().filter_map(|t| -> Option<String> {
                load_specific(t)
                    .and(Ok(t.to_string()))
                    .ok()
            })
        }
    }
}
// Instantiate the built-in target table; see `supported_targets!` above.
supported_targets!(
    ("x86_64-apple-darwin", x86_64_apple_darwin),
    ("x86_64-pc-windows-msvc", x86_64_pc_windows_msvc),
    ("x86_64-unknown-linux-gnu", x86_64_unknown_linux_gnu),
);
impl Target {
pub fn search(target_triple: &str) -> Result<Target, LoadTargetError> {
load_specific(target_triple)
}
pub fn host_target() -> Result<Target, LoadTargetError> {
Self::search(host_triple())
}
}
| 27.769231 | 99 | 0.585243 |
33a9dbe15ac7a118eb1a8fa1545fad8a0dd84539 | 2,425 | // Code written by Sam Rijs (https://github.com/srijs)
//
// Source:
// https://github.com/hyperium/hyper/issues/1335
// https://play.rust-lang.org/?gist=971e438cabd6f91efb76b7e45b15edf3&version=stable
use std::mem::replace;
use std::string::FromUtf8Error;
use futures::{Async, Poll, Stream};
use futures::stream::{Fuse};
use std::iter::Iterator;
/// Adapts a stream of byte chunks into a stream of `\n`-delimited lines.
pub(crate) struct Lines<S: Stream> {
    // Bytes received so far that have not yet been emitted as a line.
    buffered: Option<Vec<u8>>,
    // Fused so that polling again after the inner stream finishes is safe.
    stream: Fuse<S>
}
impl<S: Stream> Lines<S> {
    /// Wraps `stream`, fusing it so it is safe to poll after completion.
    pub fn new(stream: S) -> Lines<S> {
        Lines {
            buffered: None,
            stream: stream.fuse()
        }
    }

    /// Tries to extract one line from the internal buffer.
    ///
    /// Returns the bytes before the first `\n` (UTF-8 decoded) and keeps the
    /// remainder buffered. When `flush` is set, a trailing segment without a
    /// newline is also emitted (used once the inner stream has finished).
    /// Returns `None` when no complete line is available.
    fn process(&mut self, flush: bool) -> Option<Result<String, FromUtf8Error>> {
        // `Option::take` is the idiomatic form of
        // `mem::replace(&mut self.buffered, None)`.
        let buffered = self.buffered.take();
        if let Some(ref buffer) = buffered {
            let mut split = buffer.splitn(2, |c| *c == b'\n');
            if let Some(first) = split.next() {
                if let Some(second) = split.next() {
                    // A newline was found: keep everything after it buffered.
                    self.buffered = Some(second.to_vec());
                    return Some(String::from_utf8(first.to_vec()));
                } else if flush {
                    // No newline, but the caller asked to flush the remainder.
                    return Some(String::from_utf8(first.to_vec()));
                }
            }
        }
        // No line produced: put the (possibly absent) buffer back.
        self.buffered = buffered;
        None
    }
}
impl<S> Stream for Lines<S>
    where S: Stream, S::Item: AsRef<[u8]>, S::Error: From<FromUtf8Error>
{
    type Item = String;
    type Error = S::Error;
    // Polls the inner stream, buffering bytes until a full line is available.
    fn poll(&mut self) -> Poll<Option<String>, S::Error> {
        match self.stream.poll()? {
            // Inner stream has no data yet; our task will be re-notified.
            Async::NotReady => Ok(Async::NotReady),
            Async::Ready(None) => {
                // Inner stream finished: flush any trailing partial line.
                match self.process(true) {
                    Some(Ok(line)) => Ok(Async::Ready(Some(line))),
                    Some(Err(err)) => Err(err.into()),
                    None => Ok(Async::Ready(None))
                }
            },
            Async::Ready(Some(chunk)) => {
                // Append the chunk to the buffer (creating it if needed)...
                if let Some(ref mut buffer) = self.buffered {
                    buffer.extend(chunk.as_ref());
                } else {
                    self.buffered = Some(chunk.as_ref().to_vec());
                }
                // ...then try to carve out one complete line.
                match self.process(false) {
                    Some(Ok(line)) => Ok(Async::Ready(Some(line))),
                    Some(Err(err)) => Err(err.into()),
                    // NOTE(review): returning NotReady here, after the inner
                    // stream returned Ready, relies on the executor polling
                    // again; looping to poll the inner stream once more would
                    // be more robust — confirm against futures 0.1 task
                    // notification semantics.
                    None => Ok(Async::NotReady)
                }
            }
        }
    }
}
| 31.493506 | 83 | 0.501031 |
7151685f0396683056be2207634c8cef4fe11737 | 5,741 | #![feature(core_intrinsics)] // intrinsic division requires nightly
#![no_std]
#![no_main]
use klee_sys::*;
extern crate cortex_m;
extern crate panic_klee;
use cortex_m::peripheral::Peripherals;
use core::{intrinsics::unchecked_div, num::Wrapping, ptr::read_volatile};
#[no_mangle]
fn main() {
    // Take the core peripherals and start the DWT cycle counter.
    let peripherals = Peripherals::take().unwrap();
    let mut dwt = peripherals.DWT;
    dwt.enable_cycle_counter();
    // Under KLEE each hardware register read yields a fresh symbolic value
    // (named `vcell`, see the discussion below), so `a`, `b` and `c` are
    // unconstrained rather than real cycle counts.
    let a = dwt.cyccnt.read();
    let b = dwt.cyccnt.read();
    let c = dwt.cyccnt.read();
    // Uncommenting the two lines below rules out a zero divisor and lets
    // KLEE prove the division safe (see "Try uncommenting" further down).
    // let d = (Wrapping(c) - (Wrapping(b) - Wrapping(100))).0;
    // klee_assume(d != 0);
    unsafe {
        // SAFETY: intrinsic division is undefined on a zero divisor; KLEE
        // finds exactly that case (c == 0, b == 100) unless the assumption
        // above is enabled.
        let some_time_quota = unchecked_div(a, (Wrapping(c) - (Wrapping(b) - Wrapping(100))).0);
        read_volatile(&some_time_quota); // prevent optimization in release mode
    }
}
// Notice this example currently requires the nightly build of Rust.
// > rustup override set nightly
// When you are done with this example, you may return to stable Rust.
// > rustup override unset
//
// > cargo klee --example cortex_m_test_nightly -r -k -g -v
// ...
// KLEE: WARNING: undefined reference to function: rust_eh_personality
// KLEE: ERROR: examples/cortex_m_test_nightly.rs:23: divide by zero
// KLEE: NOTE: now ignoring this error at this location
//
// KLEE: done: total instructions = 1446
// KLEE: done: completed paths = 4
// KLEE: done: generated tests = 3
// ..
//(gdb) shell ls klee-last
// assembly.ll info messages.txt run.istats run.stats test000001.div.err test000001.kquery test000001.ktest test000002.ktest test000003.ktest warnings.txt
//
// So we see that test000001.ktest was causing a division error,
// the other test case passed
//
// (gdb) set env KTEST_FILE=klee-last/test000001.ktest
// (gdb) run
// Starting program: /home/pln/rust/trustit/klee-examples/target/debug/examples/cortex_m_test_nightly.replay
// Program received signal SIGFPE, Arithmetic exception.
// 0x0000555555555525 in main () at examples/cortex_m_test_nightly.rs:23
// 23 let some_time_quota = unchecked_div(a, (Wrapping(c) - (Wrapping(b) - Wrapping(100))).0);
//
// Let's look at the actual test
// (gdb) shell ktest-tool klee-last/test000001.ktest
// ktest file : 'klee-last/test000001.ktest'
// args : ['/home/pln/rust/trustit/klee-examples/target/debug/examples/cortex_m_test_nightly-dd58a25289c18430.ll']
// num objects: 5
// object 0: name: 'PRIMASK'
// object 0: size: 4
// object 0: data: b'\x01\x01\x01\x01'
// object 0: hex : 0x01010101
// object 0: int : 16843009
// object 0: uint: 16843009
// object 0: text: ....
// object 1: name: 'vcell'
// object 1: size: 4
// object 1: data: b'\x00\x00\x00\x00'
// object 1: hex : 0x00000000
// object 1: int : 0
// object 1: uint: 0
// object 1: text: ....
// object 2: name: 'vcell'
// object 2: size: 4
// object 2: data: b'\x00\x00\x00\x00'
// object 2: hex : 0x00000000
// object 2: int : 0
// object 2: uint: 0
// object 2: text: ....
// object 3: name: 'vcell'
// object 3: size: 4
// object 3: data: b'd\x00\x00\x00'
// object 3: hex : 0x64000000
// object 3: int : 100
// object 3: uint: 100
// object 3: text: d...
// object 4: name: 'vcell'
// object 4: size: 4
// object 4: data: b'\x00\x00\x00\x00'
// object 4: hex : 0x00000000
// object 4: int : 0
// object 4: uint: 0
// object 4: text: ....
//
// (gdb) backtrace
// #0 0x0000555555555525 in main () at examples/cortex_m_test_nightly.rs:23
// (gdb) print a
// $1 = 0
// (gdb) print b
// $2 = 100
// (gdb) print c
// $3 = 0
//
// In order to analyze hardware dependent code, hardware access are treated
// as a new symbolic value. In `cortex-m` we give symbolic names to core peripherals.
// The svd2rust generated PAC is currently given the symbolic name `vcell`. This
// might in the future change to giving the address to the register instead.
//
// A breakdown of the example:
// Behind the scenes the PRIMASK register is accessed, and given concrete value.
// (Under the hood, `Peripherals.take` executes in a global critical section.)
//
// This access is along the "happy path" towards the error, so any value would
// suffice (in this case 0x01010101 was selected by KLEE).
//
// The first `vcell` access: was done when enabling the cycle counter.
// The rest of accesses stem from reading `a`, `b`, and `c`.
// Critical here is that KLEE spots that `(c - (b - 100)) = 0`, leading up to a division
// by zero error (satisfied by `c == 0` and `b == 100`)
//
// Notice here, that this error is spotted EVEN while we are telling
// Rust to use the primitive (intrinsic) division for "unchecked_div" performance.
//
// Now re-run the example in --release mode.
// You should find that the error is spotted but the variables are in registers,
// so `print` won't work.
//
// Discussion:
// We can allow for AGGRESSIVE optimization by proving the absence of errors.
// In this case we use the Wrapping for unchecked wrapping arithmetics (along the lines of C/C++)
// and primitive unchecked (intrinsic) division.
//
// Checked arithmetic comes with a high price at run-time, and for embedded not
// only affects the execution time but also power consumption.
//
// We can fearlessly apply optimisations (including intrinsic/primitive operations)
// and let the tool prove that the code is free of potential errors.
//
// Try uncommenting lines 22 and 23. This introduces a sufficient assumption
// for KLEE to prove the absence of errors. (Beware that this guarantee
// holds only for the exact source code given. If you change anything
// the analysis needs to be re-done.)
//
// In conclusion, programs proven free of errors offer both
// - improved performance (allow safe use of intrinsics), and
// - improved reliability/correctness
// at the same time.
// This is the way!
| 37.522876 | 164 | 0.696046 |
b9862de677ff78b79b49bc9c069fc0027e2895a7 | 2,943 | //! This file provides snippet completions, like `pd` => `eprintln!(...)`.
use ide_db::helpers::SnippetCap;
use crate::{
context::PathCompletionContext, item::Builder, CompletionContext, CompletionItem,
CompletionItemKind, CompletionKind, Completions,
};
/// Builds a snippet completion item with the given `label` that inserts
/// `snippet` text when applied.
fn snippet(ctx: &CompletionContext, cap: SnippetCap, label: &str, snippet: &str) -> Builder {
    let mut builder = CompletionItem::new(CompletionKind::Snippet, ctx.source_range(), label);
    builder
        .insert_snippet(cap, snippet)
        .kind(CompletionItemKind::Snippet);
    builder
}
/// Offers the debug-print snippets (`pd`, `ppd`) inside function bodies.
pub(crate) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) {
    // Only meaningful inside a function body.
    if ctx.function_def.is_none() {
        return;
    }
    let can_be_stmt = match ctx.path_context {
        Some(PathCompletionContext { is_trivial_path: true, can_be_stmt, .. }) => can_be_stmt,
        _ => return,
    };
    let cap = match ctx.config.snippet_cap {
        Some(cap) => cap,
        None => return,
    };
    // These snippets expand to statements, so they are only valid in
    // statement position.
    if !can_be_stmt {
        return;
    }
    snippet(ctx, cap, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc);
    snippet(ctx, cap, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc);
}
/// Offers item-position snippets: a test module, a test function, and a
/// `macro_rules!` skeleton.
pub(crate) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) {
    if !ctx.expects_item() {
        return;
    }
    let cap = match ctx.config.snippet_cap {
        Some(cap) => cap,
        None => return,
    };

    // Test module scaffold, found by typing `tmod`.
    let mut tmod = snippet(
        ctx,
        cap,
        "tmod (Test module)",
        "\
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn ${1:test_name}() {
        $0
    }
}",
    );
    tmod.lookup_by("tmod");
    tmod.add_to(acc);

    // Single test function scaffold, found by typing `tfn`.
    let mut tfn = snippet(
        ctx,
        cap,
        "tfn (Test function)",
        "\
#[test]
fn ${1:feature}() {
    $0
}",
    );
    tfn.lookup_by("tfn");
    tfn.add_to(acc);

    // Skeleton for a `macro_rules!` definition.
    let macro_snippet =
        snippet(ctx, cap, "macro_rules", "macro_rules! $1 {\n\t($2) => {\n\t\t$0\n\t};\n}");
    macro_snippet.add_to(acc);
}
#[cfg(test)]
mod tests {
    use expect_test::{expect, Expect};
    use crate::{test_utils::completion_list, CompletionKind};
    // Runs snippet completion on the fixture and compares the rendered
    // completion list against the expectation.
    fn check(ra_fixture: &str, expect: Expect) {
        let actual = completion_list(ra_fixture, CompletionKind::Snippet);
        expect.assert_eq(&actual)
    }
    #[test]
    fn completes_snippets_in_expressions() {
        check(
            r#"fn foo(x: i32) { $0 }"#,
            expect![[r#"
                sn pd
                sn ppd
            "#]],
        );
    }
    #[test]
    fn should_not_complete_snippets_in_path() {
        // Expression snippets are statements, so they must not appear when
        // completing inside a path.
        check(r#"fn foo(x: i32) { ::foo$0 }"#, expect![[""]]);
        check(r#"fn foo(x: i32) { ::$0 }"#, expect![[""]]);
    }
    #[test]
    fn completes_snippets_in_items() {
        check(
            r#"
#[cfg(test)]
mod tests {
    $0
}
"#,
            expect![[r#"
                sn tmod (Test module)
                sn tfn (Test function)
                sn macro_rules
            "#]],
        )
    }
}
| 23.357143 | 99 | 0.538906 |
01f985fd6dd95e06e4bfa2bdd6e599c23da02300 | 897 | // Copyright (c) 2022 The Quantii Contributors
//
// This file is part of Quantii.
//
// Quantii is free software: you can redistribute
// it and/or modify it under the terms of the GNU
// Lesser General Public License as published by
// the Free Software Foundation, either version 3
// of the License, or (at your option) any later
// version.
//
// Quantii is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU Lesser General Public
// License for more details.
//
// You should have received a copy of the GNU
// Lesser General Public License along with
// Quantii. If not, see <https://www.gnu.org/licenses/>.
/// Returns the kernel configuration text, embedded at compile time from
/// `quantii_config.txt` (located next to this source file).
///
/// Exposed unmangled with the C ABI so it can be resolved by name across
/// linking boundaries; `&str` is not a proper C type, hence the lint allow.
/// NOTE(review): presumably only Rust code consumes this symbol, since a C
/// caller could not interpret the `&str` return — confirm at call sites.
#[allow(improper_ctypes_definitions)]
#[no_mangle]
pub extern "C" fn kernel_config() -> &'static str {
    include_str!("quantii_config.txt")
}
| 32.035714 | 56 | 0.730212 |
e6ae3669d4041a10487fb2edf9addb344e84e461 | 1,514 | //! Endpoints for the public room directory.
pub mod get_public_rooms;
pub mod get_public_rooms_filtered;
pub mod get_room_visibility;
pub mod set_room_visibility;
use js_int::UInt;
use ruma_identifiers::{RoomAliasId, RoomId};
use serde::{Deserialize, Serialize};
/// A chunk of a room list response, describing one room.
///
/// Fields marked `skip_serializing_if` are omitted from the wire format when
/// absent/empty.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PublicRoomsChunk {
    /// Aliases of the room. Deserializes to an empty list when missing.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub aliases: Vec<RoomAliasId>,
    /// The canonical alias of the room, if any.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub canonical_alias: Option<RoomAliasId>,
    /// The name of the room, if any.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// The number of members joined to the room.
    pub num_joined_members: UInt,
    /// The ID of the room.
    pub room_id: RoomId,
    /// The topic of the room, if any.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub topic: Option<String>,
    /// Whether the room may be viewed by guest users without joining.
    pub world_readable: bool,
    /// Whether guest users may join the room and participate in it.
    ///
    /// If they can, they will be subject to ordinary power level rules like any other user.
    pub guest_can_join: bool,
    /// The URL for the room's avatar, if one is set.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub avatar_url: Option<String>,
}
| 30.897959 | 92 | 0.693527 |
507c7069d8ee42ecb23e22ac37067c30875e5f7d | 113,905 | // Copyright 2018-2020 Cargill Incorporated
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::cell::RefCell;
use std::collections::{BTreeMap, HashMap, HashSet, VecDeque};
use std::env;
use std::iter::FromIterator;
use std::sync::{Arc, RwLock};
use std::time::SystemTime;
use protobuf::{Message, RepeatedField};
use crate::circuit::SplinterState;
use crate::circuit::{
service::SplinterNode as StateNode,
service::{Service, ServiceId},
AuthorizationType, Circuit as StateCircuit, DurabilityType, PersistenceType, RouteType,
ServiceDefinition as StateServiceDefinition,
};
use crate::consensus::{Proposal, ProposalId};
use crate::hex::to_hex;
use crate::keys::{KeyPermissionManager, KeyRegistry};
use crate::network::{
auth::{AuthorizationInquisitor, PeerAuthorizationState},
peer::PeerConnector,
};
use crate::orchestrator::{ServiceDefinition, ServiceOrchestrator, ShutdownServiceError};
use crate::protos::admin::{
AdminMessage, AdminMessage_Type, Circuit, CircuitManagementPayload,
CircuitManagementPayload_Action, CircuitManagementPayload_Header, CircuitProposal,
CircuitProposalVote, CircuitProposalVote_Vote, CircuitProposal_ProposalType,
CircuitProposal_VoteRecord, Circuit_AuthorizationType, Circuit_DurabilityType,
Circuit_PersistenceType, Circuit_RouteType, MemberReady,
};
use crate::service::error::ServiceError;
use crate::service::ServiceNetworkSender;
use crate::signing::SignatureVerifier;
use crate::storage::sets::mem::DurableBTreeSet;
use super::error::{AdminSharedError, MarshallingError};
use super::mailbox::Mailbox;
use super::messages;
use super::open_proposals::{OpenProposals, Proposals};
use super::{admin_service_id, sha256, AdminServiceEventSubscriber, AdminSubscriberError, Events};
/// Fallback directory for persisted state when `SPLINTER_STATE_DIR` is unset.
const DEFAULT_STATE_DIR: &str = "/var/lib/splinter/";
/// Environment variable that overrides the state directory.
const STATE_DIR_ENV: &str = "SPLINTER_STATE_DIR";
// Role names used in key permission checks (presumably consulted via the
// key permission manager; the call sites are outside this chunk).
static VOTER_ROLE: &str = "voter";
static PROPOSER_ROLE: &str = "proposer";
/// Bound on the number of admin events retained by the in-memory mailbox.
const DEFAULT_IN_MEMORY_EVENT_LIMIT: usize = 100;
/// A payload waiting on the listed peer node ids to finish authorization.
type UnpeeredPendingPayload = (Vec<String>, CircuitManagementPayload);
/// Vote status of a circuit proposal, as determined by `check_approved`.
enum CircuitProposalStatus {
    /// The proposal has the votes required to be committed.
    Accepted,
    /// The proposal was voted down.
    Rejected,
    /// Voting is still in progress.
    Pending,
}
/// A proposal staged by consensus, bundled with the action and signer that
/// produced it; consumed when the pending change is committed.
struct CircuitProposalContext {
    pub circuit_proposal: CircuitProposal,
    pub action: CircuitManagementPayload_Action,
    pub signer_public_key: Vec<u8>,
}
/// A committed circuit whose services have not yet been started, plus the
/// set of member node ids that have reported `MEMBER_READY`.
struct UninitializedCircuit {
    pub circuit: Option<CircuitProposal>,
    pub ready_members: HashSet<String>,
}
/// Event subscribers keyed by circuit management type.
///
/// The `RefCell` lets broadcasting (which prunes dead subscribers in place)
/// run through a shared `&self` reference.
struct SubscriberMap {
    subscribers_by_type: RefCell<HashMap<String, Vec<Box<dyn AdminServiceEventSubscriber>>>>,
}
impl SubscriberMap {
    /// Creates an empty subscriber registry.
    fn new() -> Self {
        Self {
            subscribers_by_type: RefCell::new(HashMap::new()),
        }
    }

    /// Delivers `admin_service_event` to every subscriber registered under
    /// `event_type`, dropping any subscriber that asks to unsubscribe.
    fn broadcast_by_type(
        &self,
        event_type: &str,
        admin_service_event: &messages::AdminServiceEvent,
        timestamp: &SystemTime,
    ) {
        let mut subscribers_by_type = self.subscribers_by_type.borrow_mut();
        let subscribers = match subscribers_by_type.get_mut(event_type) {
            Some(subscribers) => subscribers,
            None => return,
        };
        subscribers.retain(|subscriber| {
            match subscriber.handle_event(admin_service_event, timestamp) {
                Ok(()) => true,
                // The subscriber asked to be removed; drop it.
                Err(AdminSubscriberError::Unsubscribe) => false,
                // Delivery failed, but keep the subscriber for later events.
                Err(AdminSubscriberError::UnableToHandleEvent(msg)) => {
                    error!("Unable to send event: {}", msg);
                    true
                }
            }
        });
    }

    /// Registers `listener` to receive events of `event_type`.
    fn add_subscriber(
        &mut self,
        event_type: String,
        listener: Box<dyn AdminServiceEventSubscriber>,
    ) {
        self.subscribers_by_type
            .borrow_mut()
            .entry(event_type)
            .or_default()
            .push(listener);
    }

    /// Removes all subscribers of every event type.
    fn clear(&mut self) {
        self.subscribers_by_type.borrow_mut().clear();
    }
}
/// State shared between the admin service's message handlers, consensus
/// integration, and REST-facing accessors.
pub struct AdminServiceShared {
    // the node id of the connected splinter node
    node_id: String,
    // the list of circuit proposals that are being voted on by members of a circuit
    open_proposals: OpenProposals,
    // the list of circuits that have been committed to splinter state but whose services haven't
    // been initialized
    uninitialized_circuits: HashMap<String, UninitializedCircuit>,
    // orchestrator used to initialize and shutdown services
    orchestrator: ServiceOrchestrator,
    // list of services that have been initialized using the orchestrator
    running_services: HashSet<ServiceDefinition>,
    // peer connector used to connect to new members listed in a circuit
    peer_connector: PeerConnector,
    // auth inquisitor
    auth_inquisitor: Box<dyn AuthorizationInquisitor>,
    // network sender is used to communicate with other services on the splinter network
    network_sender: Option<Box<dyn ServiceNetworkSender>>,
    // the CircuitManagementPayloads that require peers to be fully authorized before they can go
    // through consensus
    unpeered_payloads: Vec<UnpeeredPendingPayload>,
    // CircuitManagementPayloads that still need to go through consensus
    pending_circuit_payloads: VecDeque<CircuitManagementPayload>,
    // The pending consensus proposals
    pending_consensus_proposals: HashMap<ProposalId, (Proposal, CircuitManagementPayload)>,
    // the pending changes for the current proposal
    pending_changes: Option<CircuitProposalContext>,
    // the verifiers that should be broadcasted for the pending change
    current_consensus_verifiers: Vec<String>,
    // Admin Service Event Subscribers
    event_subscribers: SubscriberMap,
    // Mailbox of AdminServiceEvent values
    event_mailbox: Mailbox,
    // copy of splinter state
    splinter_state: Arc<RwLock<SplinterState>>,
    // signature verifier
    signature_verifier: Box<dyn SignatureVerifier + Send>,
    key_registry: Box<dyn KeyRegistry>,
    key_permission_manager: Box<dyn KeyPermissionManager>,
}
impl AdminServiceShared {
#![allow(clippy::too_many_arguments)]
    /// Constructs the shared admin-service state.
    ///
    /// The proposal store location is derived from the `SPLINTER_STATE_DIR`
    /// environment variable (falling back to `/var/lib/splinter/`) when
    /// `storage_type` is `"yaml"`; `"memory"` keeps proposals in memory.
    ///
    /// # Panics
    ///
    /// Panics on any `storage_type` other than `"yaml"` or `"memory"`.
    /// NOTE(review): since this already returns a `Result`, returning an
    /// error here would be kinder to callers than panicking.
    pub fn new(
        node_id: String,
        orchestrator: ServiceOrchestrator,
        peer_connector: PeerConnector,
        auth_inquisitor: Box<dyn AuthorizationInquisitor>,
        splinter_state: Arc<RwLock<SplinterState>>,
        signature_verifier: Box<dyn SignatureVerifier + Send>,
        key_registry: Box<dyn KeyRegistry>,
        key_permission_manager: Box<dyn KeyPermissionManager>,
        storage_type: &str,
    ) -> Result<Self, ServiceError> {
        // Resolve the state directory from the environment, falling back to
        // the default install location.
        let location = {
            if let Ok(s) = env::var(STATE_DIR_ENV) {
                s
            } else {
                DEFAULT_STATE_DIR.to_string()
            }
        };
        let storage_location = match storage_type {
            "yaml" => format!("{}{}", location, "/circuit_proposals.yaml"),
            "memory" => "memory".to_string(),
            _ => panic!("Storage type is not supported: {}", storage_type),
        };
        let open_proposals = OpenProposals::new(storage_location)
            .map_err(|err| ServiceError::UnableToCreate(Box::new(err)))?;
        // Bounded in-memory mailbox for admin service events.
        let event_mailbox = Mailbox::new(DurableBTreeSet::new_boxed_with_bound(
            std::num::NonZeroUsize::new(DEFAULT_IN_MEMORY_EVENT_LIMIT).unwrap(),
        ));
        Ok(AdminServiceShared {
            node_id,
            network_sender: None,
            open_proposals,
            uninitialized_circuits: Default::default(),
            orchestrator,
            running_services: HashSet::new(),
            peer_connector,
            auth_inquisitor,
            unpeered_payloads: Vec::new(),
            pending_circuit_payloads: VecDeque::new(),
            pending_consensus_proposals: HashMap::new(),
            pending_changes: None,
            current_consensus_verifiers: Vec::new(),
            event_subscribers: SubscriberMap::new(),
            event_mailbox,
            splinter_state,
            signature_verifier,
            key_registry,
            key_permission_manager,
        })
    }
    /// Returns the id of the connected splinter node.
    pub fn node_id(&self) -> &str {
        &self.node_id
    }

    /// Returns the sender used to reach other services, if one has been set.
    pub fn network_sender(&self) -> &Option<Box<dyn ServiceNetworkSender>> {
        &self.network_sender
    }

    /// Returns the authorization inquisitor for peer auth state.
    pub fn auth_inquisitor(&self) -> &dyn AuthorizationInquisitor {
        &*self.auth_inquisitor
    }

    /// Installs (or clears) the network sender.
    pub fn set_network_sender(&mut self, network_sender: Option<Box<dyn ServiceNetworkSender>>) {
        self.network_sender = network_sender;
    }

    /// Removes and returns the next payload awaiting consensus, if any.
    pub fn pop_pending_circuit_payload(&mut self) -> Option<CircuitManagementPayload> {
        self.pending_circuit_payloads.pop_front()
    }

    /// Looks up a pending consensus proposal by id.
    pub fn pending_consensus_proposals(
        &self,
        id: &ProposalId,
    ) -> Option<&(Proposal, CircuitManagementPayload)> {
        self.pending_consensus_proposals.get(id)
    }

    /// Removes and returns the pending consensus proposal with `id`, if any.
    pub fn remove_pending_consensus_proposals(
        &mut self,
        id: &ProposalId,
    ) -> Option<(Proposal, CircuitManagementPayload)> {
        self.pending_consensus_proposals.remove(id)
    }

    /// Records a proposal that is awaiting consensus.
    pub fn add_pending_consensus_proposal(
        &mut self,
        id: ProposalId,
        proposal: (Proposal, CircuitManagementPayload),
    ) {
        self.pending_consensus_proposals.insert(id, proposal);
    }

    /// Returns the verifiers to broadcast for the pending change.
    pub fn current_consensus_verifiers(&self) -> &Vec<String> {
        &self.current_consensus_verifiers
    }
/// Applies the change staged by the last call to `propose_change`.
///
/// The outcome depends on the proposal's vote tally (`check_approved`):
/// - `Accepted`: the circuit is written into splinter state, the open
///   proposal is removed, a `ProposalAccepted` event is emitted, a
///   MEMBER_READY message is sent to every other member's admin service,
///   and the circuit is queued as uninitialized pending service startup.
/// - `Pending`: the proposal is stored, and a `ProposalSubmitted` or
///   `ProposalVote` event is emitted depending on the staged action.
/// - `Rejected`: the proposal is removed and a `ProposalRejected` event is
///   emitted.
///
/// Returns `AdminSharedError::NoPendingChanges` if nothing was staged.
pub fn commit(&mut self) -> Result<(), AdminSharedError> {
    match self.pending_changes.take() {
        Some(circuit_proposal_context) => {
            let circuit_proposal = circuit_proposal_context.circuit_proposal;
            let action = circuit_proposal_context.action;
            let circuit_id = circuit_proposal.get_circuit_id();
            // Captured before any mutation so events can be routed to the
            // right subscribers even after the proposal is consumed below.
            let mgmt_type = circuit_proposal
                .get_circuit_proposal()
                .circuit_management_type
                .clone();
            match self.check_approved(&circuit_proposal) {
                Ok(CircuitProposalStatus::Accepted) => {
                    // commit new circuit
                    let circuit = circuit_proposal.get_circuit_proposal();
                    self.update_splinter_state(circuit)?;
                    // remove approved proposal
                    self.remove_proposal(&circuit_id)?;
                    // send message about circuit acceptance
                    let circuit_proposal_proto =
                        messages::CircuitProposal::from_proto(circuit_proposal.clone())
                            .map_err(AdminSharedError::InvalidMessageFormat)?;
                    let event = messages::AdminServiceEvent::ProposalAccepted((
                        circuit_proposal_proto,
                        circuit_proposal_context.signer_public_key,
                    ));
                    self.send_event(&mgmt_type, event);
                    // send MEMBER_READY message to all other members' admin services
                    // NOTE(review): if no network sender is set, this block is
                    // silently skipped — the commit still succeeds locally.
                    if let Some(ref network_sender) = self.network_sender {
                        let mut member_ready = MemberReady::new();
                        member_ready.set_circuit_id(circuit.circuit_id.clone());
                        member_ready.set_member_node_id(self.node_id.clone());
                        let mut msg = AdminMessage::new();
                        msg.set_message_type(AdminMessage_Type::MEMBER_READY);
                        msg.set_member_ready(member_ready);
                        let envelope_bytes =
                            msg.write_to_bytes().map_err(MarshallingError::from)?;
                        for member in circuit.members.iter() {
                            // Skip sending to ourselves.
                            if member.get_node_id() != self.node_id {
                                network_sender.send(
                                    &admin_service_id(member.get_node_id()),
                                    &envelope_bytes,
                                )?;
                            }
                        }
                    }
                    // add circuit as pending initialization
                    self.add_uninitialized_circuit(circuit_proposal.clone())
                }
                Ok(CircuitProposalStatus::Pending) => {
                    self.add_proposal(circuit_proposal.clone())?;
                    match action {
                        CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST => {
                            // notify registered application authorization handlers of the
                            // committed circuit proposal
                            let event = messages::AdminServiceEvent::ProposalSubmitted(
                                messages::CircuitProposal::from_proto(circuit_proposal.clone())
                                    .map_err(AdminSharedError::InvalidMessageFormat)?,
                            );
                            self.send_event(&mgmt_type, event);
                            info!("committed changes for new circuit proposal {}", circuit_id);
                            Ok(())
                        }
                        CircuitManagementPayload_Action::CIRCUIT_PROPOSAL_VOTE => {
                            // notify registered application authorization handlers of the
                            // committed circuit proposal
                            let circuit_proposal_proto =
                                messages::CircuitProposal::from_proto(circuit_proposal.clone())
                                    .map_err(AdminSharedError::InvalidMessageFormat)?;
                            let event = messages::AdminServiceEvent::ProposalVote((
                                circuit_proposal_proto,
                                circuit_proposal_context.signer_public_key,
                            ));
                            self.send_event(&mgmt_type, event);
                            info!("committed vote for circuit proposal {}", circuit_id);
                            Ok(())
                        }
                        _ => Err(AdminSharedError::UnknownAction(format!(
                            "Received unknown action: {:?}",
                            action
                        ))),
                    }
                }
                Ok(CircuitProposalStatus::Rejected) => {
                    // remove circuit
                    self.remove_proposal(&circuit_id)?;
                    let circuit_proposal_proto =
                        messages::CircuitProposal::from_proto(circuit_proposal.clone())
                            .map_err(AdminSharedError::InvalidMessageFormat)?;
                    let event = messages::AdminServiceEvent::ProposalRejected((
                        circuit_proposal_proto,
                        circuit_proposal_context.signer_public_key,
                    ));
                    self.send_event(&mgmt_type, event);
                    info!("circuit proposal for {} has been rejected", circuit_id);
                    Ok(())
                }
                Err(err) => Err(err),
            }
        }
        None => Err(AdminSharedError::NoPendingChanges),
    }
}
/// Discards any change staged by `propose_change` without applying it.
///
/// Always succeeds; rolling back when nothing is staged is a no-op.
pub fn rollback(&mut self) -> Result<(), AdminSharedError> {
    if let Some(circuit_proposal_context) = self.pending_changes.take() {
        info!(
            "discarded change for {}",
            circuit_proposal_context.circuit_proposal.get_circuit_id()
        );
    } else {
        debug!("no changes to rollback");
    }
    Ok(())
}
/// Validates a circuit management payload and stages it as the pending
/// change, returning the expected state hash and the resulting proposal.
///
/// For `CIRCUIT_CREATE_REQUEST`, a fresh `CircuitProposal` is built from
/// the proposed circuit. For `CIRCUIT_PROPOSAL_VOTE`, the vote is appended
/// to the existing open proposal's vote records. In both cases the
/// consensus verifiers are reset to the admin-service ids of the circuit's
/// members, and the change is held in `pending_changes` until `commit` or
/// `rollback` is called.
///
/// Returns `AdminSharedError::ValidationFailed` on any signature, header,
/// or circuit validation failure, and for unset/unknown actions.
pub fn propose_change(
    &mut self,
    mut circuit_payload: CircuitManagementPayload,
) -> Result<(String, CircuitProposal), AdminSharedError> {
    let header = protobuf::parse_from_bytes::<CircuitManagementPayload_Header>(
        circuit_payload.get_header(),
    )
    .map_err(MarshallingError::from)?;
    self.validate_circuit_management_payload(&circuit_payload, &header)?;
    self.verify_signature(&circuit_payload).map_err(|_| {
        AdminSharedError::ValidationFailed(String::from("Unable to verify signature"))
    })?;
    match header.get_action() {
        CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST => {
            // `take_*` moves the circuit out of the payload to avoid a copy.
            let mut create_request = circuit_payload.take_circuit_create_request();
            let proposed_circuit = create_request.take_circuit();
            let mut verifiers = vec![];
            for member in proposed_circuit.get_members() {
                verifiers.push(admin_service_id(member.get_node_id()));
            }
            let signer_public_key = header.get_requester();
            let requester_node_id = header.get_requester_node_id();
            self.validate_create_circuit(
                &proposed_circuit,
                signer_public_key,
                requester_node_id,
            )?;
            debug!("proposing {}", proposed_circuit.get_circuit_id());
            let mut circuit_proposal = CircuitProposal::new();
            circuit_proposal.set_proposal_type(CircuitProposal_ProposalType::CREATE);
            circuit_proposal.set_circuit_id(proposed_circuit.get_circuit_id().into());
            // Hash of the circuit itself; later votes must match this hash.
            circuit_proposal.set_circuit_hash(sha256(&proposed_circuit)?);
            circuit_proposal.set_circuit_proposal(proposed_circuit);
            circuit_proposal.set_requester(header.get_requester().to_vec());
            circuit_proposal.set_requester_node_id(header.get_requester_node_id().to_string());
            // Hash of the whole proposal — the value consensus agrees on.
            let expected_hash = sha256(&circuit_proposal)?;
            self.pending_changes = Some(CircuitProposalContext {
                circuit_proposal: circuit_proposal.clone(),
                signer_public_key: header.get_requester().to_vec(),
                action: CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST,
            });
            self.current_consensus_verifiers = verifiers;
            Ok((expected_hash, circuit_proposal))
        }
        CircuitManagementPayload_Action::CIRCUIT_PROPOSAL_VOTE => {
            let proposal_vote = circuit_payload.get_circuit_proposal_vote();
            // validate vote proposal
            // check that the circuit proposal exists
            let mut circuit_proposal = self
                .get_proposal(proposal_vote.get_circuit_id())
                .map_err(|err| {
                    AdminSharedError::ValidationFailed(format!(
                        "error occured when trying to get proposal {}",
                        err
                    ))
                })?
                .ok_or_else(|| {
                    AdminSharedError::ValidationFailed(format!(
                        "Received vote for a proposal that does not exist: circuit id {}",
                        proposal_vote.circuit_id
                    ))
                })?;
            let mut verifiers = vec![];
            for member in circuit_proposal.get_circuit_proposal().get_members() {
                verifiers.push(admin_service_id(member.get_node_id()));
            }
            let signer_public_key = header.get_requester();
            self.validate_circuit_vote(
                proposal_vote,
                signer_public_key,
                &circuit_proposal,
                header.get_requester_node_id(),
            )?;
            // add vote to circuit_proposal
            let mut vote_record = CircuitProposal_VoteRecord::new();
            vote_record.set_public_key(signer_public_key.to_vec());
            vote_record.set_vote(proposal_vote.get_vote());
            vote_record.set_voter_node_id(header.get_requester_node_id().to_string());
            let mut votes = circuit_proposal.get_votes().to_vec();
            votes.push(vote_record);
            circuit_proposal.set_votes(RepeatedField::from_vec(votes));
            let expected_hash = sha256(&circuit_proposal)?;
            self.pending_changes = Some(CircuitProposalContext {
                circuit_proposal: circuit_proposal.clone(),
                signer_public_key: header.get_requester().to_vec(),
                action: CircuitManagementPayload_Action::CIRCUIT_PROPOSAL_VOTE,
            });
            self.current_consensus_verifiers = verifiers;
            Ok((expected_hash, circuit_proposal))
        }
        CircuitManagementPayload_Action::ACTION_UNSET => Err(
            AdminSharedError::ValidationFailed("Action must be set".to_string()),
        ),
        unknown_action => Err(AdminSharedError::ValidationFailed(format!(
            "Unable to handle {:?}",
            unknown_action
        ))),
    }
}
/// Returns true when an open proposal exists for the given circuit id.
pub fn has_proposal(&self, circuit_id: &str) -> bool {
    self.open_proposals.has_proposal(circuit_id)
}
/// Propose a new circuit
///
/// This operation will propose a new circuit to all the member nodes of the circuit. If there
/// is no peer connection, a connection to the peer will also be established.
pub fn propose_circuit(
    &mut self,
    payload: CircuitManagementPayload,
) -> Result<(), ServiceError> {
    debug!(
        "received circuit proposal for {}",
        payload
            .get_circuit_create_request()
            .get_circuit()
            .get_circuit_id()
    );
    // Collect members that still need network authorization, connecting to
    // each of them as we go.
    let mut unauthorized_peers = vec![];
    for node in payload
        .get_circuit_create_request()
        .get_circuit()
        .get_members()
    {
        // Ourselves and already-authorized peers need no connection.
        let is_self = self.node_id() == node.get_node_id();
        if is_self || self.auth_inquisitor.is_authorized(node.get_node_id()) {
            continue;
        }
        debug!("Connecting to node {:?}", node);
        self.peer_connector
            .connect_peer(node.get_node_id(), node.get_endpoint())
            .map_err(|err| ServiceError::UnableToHandleMessage(Box::new(err)))?;
        unauthorized_peers.push(node.get_node_id().into());
    }
    if unauthorized_peers.is_empty() {
        // Every member is authorized; the payload can be processed now.
        self.pending_circuit_payloads.push_back(payload);
    } else {
        // Park the payload until on_authorization_change clears the list.
        debug!(
            "Members {:?} added; awaiting network authorization before proceeding",
            &unauthorized_peers
        );
        self.unpeered_payloads.push((unauthorized_peers, payload));
    }
    Ok(())
}
/// Queues a circuit proposal vote payload for processing.
pub fn propose_vote(&mut self, payload: CircuitManagementPayload) -> Result<(), ServiceError> {
    debug!(
        "received circuit vote for {}",
        payload.get_circuit_proposal_vote().get_circuit_id()
    );
    self.pending_circuit_payloads.push_back(payload);
    Ok(())
}
/// Validates a submitted circuit management payload, verifies its
/// signature, and dispatches it to the handler for its action.
pub fn submit(&mut self, payload: CircuitManagementPayload) -> Result<(), ServiceError> {
    debug!("Payload submitted: {:?}", payload);
    let header =
        protobuf::parse_from_bytes::<CircuitManagementPayload_Header>(payload.get_header())?;
    self.validate_circuit_management_payload(&payload, &header)
        .map_err(|err| ServiceError::UnableToHandleMessage(Box::new(err)))?;
    self.verify_signature(&payload)?;
    match header.get_action() {
        CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST => self.propose_circuit(payload),
        CircuitManagementPayload_Action::CIRCUIT_PROPOSAL_VOTE => self.propose_vote(payload),
        CircuitManagementPayload_Action::ACTION_UNSET => Err(
            ServiceError::UnableToHandleMessage(Box::new(AdminSharedError::ValidationFailed(
                String::from("No action specified"),
            ))),
        ),
        unknown_action => Err(ServiceError::UnableToHandleMessage(Box::new(
            AdminSharedError::ValidationFailed(format!(
                "Unable to handle {:?}",
                unknown_action
            )),
        ))),
    }
}
/// Returns all mailbox events after `since_timestamp` whose proposal has
/// the given circuit management type.
pub fn get_events_since(
    &self,
    since_timestamp: &SystemTime,
    circuit_management_type: &str,
) -> Result<Events, AdminSharedError> {
    let all_events = self
        .event_mailbox
        .iter_since(*since_timestamp)
        .map_err(|err| AdminSharedError::UnableToAddSubscriber(err.to_string()))?;
    // Owned copy so the filter closure can outlive this call.
    let management_type = circuit_management_type.to_string();
    let matching = all_events
        .filter(move |(_, evt)| evt.proposal().circuit.circuit_management_type == management_type);
    Ok(Events {
        inner: Box::new(matching),
    })
}
/// Registers a subscriber for admin events of the given circuit management
/// type.
pub fn add_subscriber(
    &mut self,
    circuit_management_type: String,
    subscriber: Box<dyn AdminServiceEventSubscriber>,
) -> Result<(), AdminSharedError> {
    self.event_subscribers
        .add_subscriber(circuit_management_type, subscriber);
    Ok(())
}
/// Stores the event in the durable mailbox, then broadcasts it to the
/// subscribers registered for `circuit_management_type`.
///
/// If storing fails, the error is logged and nothing is broadcast, so
/// subscribers only ever observe persisted events.
pub fn send_event(
    &mut self,
    circuit_management_type: &str,
    event: messages::AdminServiceEvent,
) {
    let (ts, event) = match self.event_mailbox.add(event) {
        Ok((ts, event)) => (ts, event),
        Err(err) => {
            error!("Unable to store admin event: {}", err);
            return;
        }
    };
    // `circuit_management_type` is already a `&str`; the previous
    // `&circuit_management_type` created a needless `&&str` borrow.
    self.event_subscribers
        .broadcast_by_type(circuit_management_type, &event, &ts);
}
/// Drops every registered admin event subscriber.
pub fn remove_all_event_subscribers(&mut self) {
    self.event_subscribers.clear();
}
pub fn on_authorization_change(&mut self, peer_id: &str, state: PeerAuthorizationState) {
let mut unpeered_payloads = std::mem::replace(&mut self.unpeered_payloads, vec![]);
for (ref mut peers, _) in unpeered_payloads.iter_mut() {
match state {
PeerAuthorizationState::Authorized => {
peers.retain(|unpeered_id| unpeered_id != peer_id);
}
PeerAuthorizationState::Unauthorized => {
if peers.iter().any(|unpeered_id| unpeered_id == peer_id) {
warn!("Dropping circuit request including peer {}, due to authorization failure", peer_id);
peers.clear();
}
}
}
}
let (fully_peered, still_unpeered): (
Vec<UnpeeredPendingPayload>,
Vec<UnpeeredPendingPayload>,
) = unpeered_payloads
.into_iter()
.partition(|(peers, _)| peers.is_empty());
std::mem::replace(&mut self.unpeered_payloads, still_unpeered);
if state == PeerAuthorizationState::Authorized {
self.pending_circuit_payloads
.extend(fully_peered.into_iter().map(|(_, payload)| payload));
}
}
/// Fetches the open proposal for the given circuit id, if one exists.
pub fn get_proposal(
    &self,
    circuit_id: &str,
) -> Result<Option<CircuitProposal>, AdminSharedError> {
    let proposal = self.open_proposals.get_proposal(circuit_id)?;
    Ok(proposal)
}
/// Returns an iterator-like view over all open proposals.
pub fn get_proposals(&self) -> Proposals {
    self.open_proposals.get_proposals()
}
/// Removes and returns the open proposal for the given circuit id, if one
/// exists.
pub fn remove_proposal(
    &mut self,
    circuit_id: &str,
) -> Result<Option<CircuitProposal>, AdminSharedError> {
    let removed = self.open_proposals.remove_proposal(circuit_id)?;
    Ok(removed)
}
/// Stores an open proposal, returning any previous proposal it replaced.
pub fn add_proposal(
    &mut self,
    circuit_proposal: CircuitProposal,
) -> Result<Option<CircuitProposal>, AdminSharedError> {
    let previous = self.open_proposals.add_proposal(circuit_proposal)?;
    Ok(previous)
}
/// Add a circuit definition as an uninitialized circuit. If all members are ready, initialize
/// services.
fn add_uninitialized_circuit(
    &mut self,
    circuit: CircuitProposal,
) -> Result<(), AdminSharedError> {
    let circuit_id = circuit.get_circuit_id().to_string();
    // Insert-or-update in a single map lookup via the entry API; an entry
    // may already exist (with no circuit definition) if a MEMBER_READY
    // message arrived before this node committed the proposal. This also
    // removes the follow-up `get_mut(..).expect(..)` the old code needed.
    let uninit_circuit = self
        .uninitialized_circuits
        .entry(circuit_id.clone())
        .or_insert_with(|| UninitializedCircuit {
            circuit: None,
            ready_members: HashSet::new(),
        });
    uninit_circuit.circuit = Some(circuit);
    // This node is ready for its own committed circuit by definition.
    uninit_circuit.ready_members.insert(self.node_id.clone());
    self.initialize_services_if_members_ready(&circuit_id)
}
/// Mark member node as ready to initialize services on the given circuit. If all members are
/// now ready, initialize services.
pub fn add_ready_member(
    &mut self,
    circuit_id: &str,
    member_node_id: String,
) -> Result<(), AdminSharedError> {
    // Create-or-fetch in one map lookup via the entry API (the old code did
    // a contains check, an insert, and then a `get_mut(..).expect(..)`).
    // The entry may not exist yet: a MEMBER_READY message can arrive before
    // this node has committed the circuit proposal itself.
    self.uninitialized_circuits
        .entry(circuit_id.to_string())
        .or_insert_with(|| UninitializedCircuit {
            circuit: None,
            ready_members: HashSet::new(),
        })
        .ready_members
        .insert(member_node_id);
    self.initialize_services_if_members_ready(circuit_id)
}
/// If all members of an uninitialized circuit are ready, initialize services. Also send
/// CircuitReady notification to application authorization handler.
fn initialize_services_if_members_ready(
    &mut self,
    circuit_id: &str,
) -> Result<(), AdminSharedError> {
    // Ready only when the circuit definition has been committed locally
    // (circuit is Some) AND every member named in the proposal has
    // reported ready. The check is scoped in a block so the immutable
    // borrow of `uninitialized_circuits` ends before the removal below.
    let ready = {
        if let Some(uninitialized_circuit) = self.uninitialized_circuits.get(circuit_id) {
            if let Some(ref circuit_proposal) = uninitialized_circuit.circuit {
                let all_members = HashSet::from_iter(
                    circuit_proposal
                        .get_circuit_proposal()
                        .members
                        .iter()
                        .map(|node| node.node_id.clone()),
                );
                all_members.is_subset(&uninitialized_circuit.ready_members)
            } else {
                false
            }
        } else {
            false
        }
    };
    if ready {
        // Both expects are safe: `ready` implies the entry exists and its
        // circuit is Some.
        let circuit_proposal = self
            .uninitialized_circuits
            .remove(circuit_id)
            .expect("Uninitialized circuit not set")
            .circuit
            .expect("Uninitialized circuit's circuit proposal not set");
        self.initialize_services(circuit_proposal.get_circuit_proposal())?;
        let mgmt_type = circuit_proposal
            .get_circuit_proposal()
            .circuit_management_type
            .clone();
        let event = messages::AdminServiceEvent::CircuitReady(
            messages::CircuitProposal::from_proto(circuit_proposal)?,
        );
        self.send_event(&mgmt_type, event);
    }
    Ok(())
}
/// Validates a circuit-create request: the requester's key must be
/// registered to the requesting node and hold the proposer role, the
/// circuit id must not collide with an open proposal or an existing
/// circuit, and the circuit definition itself must pass `validate_circuit`.
fn validate_create_circuit(
    &self,
    circuit: &Circuit,
    signer_public_key: &[u8],
    requester_node_id: &str,
) -> Result<(), AdminSharedError> {
    if requester_node_id.is_empty() {
        return Err(AdminSharedError::ValidationFailed(
            "requester_node_id is empty".to_string(),
        ));
    }
    let key_info = self
        .key_registry
        .get_key(signer_public_key)
        .map_err(|err| AdminSharedError::ValidationFailed(err.to_string()))?
        .ok_or_else(|| {
            AdminSharedError::ValidationFailed(format!(
                "{} is not registered for a node",
                to_hex(signer_public_key)
            ))
        })?;
    if key_info.associated_node_id() != requester_node_id {
        return Err(AdminSharedError::ValidationFailed(format!(
            "{} is not registered for the node in header",
            to_hex(signer_public_key)
        )));
    };
    self.key_permission_manager
        .is_permitted(signer_public_key, PROPOSER_ROLE)
        .map_err(|_| {
            // Fixed message: this check is for the proposer role, but the
            // old text incorrectly said "not permitted to vote".
            AdminSharedError::ValidationFailed(format!(
                "{} is not permitted to propose circuits for node {}",
                to_hex(signer_public_key),
                key_info.associated_node_id()
            ))
        })?;
    if self.has_proposal(circuit.get_circuit_id()) {
        return Err(AdminSharedError::ValidationFailed(format!(
            "Ignoring duplicate create proposal of circuit {}",
            circuit.get_circuit_id()
        )));
    }
    if self
        .splinter_state
        .read()
        .map_err(|_| AdminSharedError::PoisonedLock("Splinter State Read Lock".into()))?
        .has_circuit(circuit.get_circuit_id())
    {
        return Err(AdminSharedError::ValidationFailed(format!(
            "Circuit with circuit id {} already exists",
            circuit.get_circuit_id()
        )));
    }
    self.validate_circuit(circuit)?;
    Ok(())
}
/// Performs structural validation of a proposed circuit: every enum field
/// must be set; the circuit id and management type must be non-empty;
/// member node ids and endpoints must be non-empty and unique; this node
/// must be a member; and each service must be unique, with exactly one
/// allowed node drawn from the member list.
fn validate_circuit(&self, circuit: &Circuit) -> Result<(), AdminSharedError> {
    if circuit.get_authorization_type() == Circuit_AuthorizationType::UNSET_AUTHORIZATION_TYPE {
        return Err(AdminSharedError::ValidationFailed(
            "authorization_type cannot be unset".to_string(),
        ));
    }
    if circuit.get_persistence() == Circuit_PersistenceType::UNSET_PERSISTENCE_TYPE {
        return Err(AdminSharedError::ValidationFailed(
            "persistence_type cannot be unset".to_string(),
        ));
    }
    if circuit.get_durability() == Circuit_DurabilityType::UNSET_DURABILITY_TYPE {
        return Err(AdminSharedError::ValidationFailed(
            "durability_type cannot be unset".to_string(),
        ));
    }
    if circuit.get_routes() == Circuit_RouteType::UNSET_ROUTE_TYPE {
        return Err(AdminSharedError::ValidationFailed(
            "route_type cannot be unset".to_string(),
        ));
    }
    if circuit.get_circuit_id().is_empty() {
        return Err(AdminSharedError::ValidationFailed(
            "circuit_id must be set".to_string(),
        ));
    }
    if circuit.get_circuit_management_type().is_empty() {
        return Err(AdminSharedError::ValidationFailed(
            "circuit_management_type must be set".to_string(),
        ));
    }
    // Uniqueness checks for members: both node ids and endpoints must be
    // distinct across the circuit.
    let mut members: Vec<String> = Vec::new();
    let mut endpoints: Vec<String> = Vec::new();
    for member in circuit.get_members() {
        let node_id = member.get_node_id().to_string();
        if node_id.is_empty() {
            return Err(AdminSharedError::ValidationFailed(
                "Member node id cannot be empty".to_string(),
            ));
        } else if members.contains(&node_id) {
            return Err(AdminSharedError::ValidationFailed(
                "Every member must be unique in the circuit.".to_string(),
            ));
        } else {
            members.push(node_id);
        }
        let endpoint = member.get_endpoint().to_string();
        if endpoint.is_empty() {
            return Err(AdminSharedError::ValidationFailed(
                "Member endpoint cannot be empty".to_string(),
            ));
        } else if endpoints.contains(&endpoint) {
            return Err(AdminSharedError::ValidationFailed(
                "Every member endpoint must be unique in the circuit.".to_string(),
            ));
        } else {
            endpoints.push(endpoint);
        }
    }
    if members.is_empty() {
        return Err(AdminSharedError::ValidationFailed(
            "The circuit must have members".to_string(),
        ));
    }
    // check this node is in members
    if !members.contains(&self.node_id) {
        return Err(AdminSharedError::ValidationFailed(format!(
            "Circuit does not contain this node: {}",
            self.node_id
        )));
    }
    if circuit.get_roster().is_empty() {
        return Err(AdminSharedError::ValidationFailed(
            "The circuit must have services".to_string(),
        ));
    }
    let mut services: Vec<String> = Vec::new();
    // check that all services' allowed nodes are in members
    for service in circuit.get_roster() {
        if service.get_allowed_nodes().is_empty() {
            return Err(AdminSharedError::ValidationFailed(
                "Service cannot have an empty allowed nodes list".to_string(),
            ));
        }
        if service.get_allowed_nodes().len() > 1 {
            return Err(AdminSharedError::ValidationFailed(
                "Only one allowed node for a service is supported".to_string(),
            ));
        }
        for node in service.get_allowed_nodes() {
            if !members.contains(node) {
                return Err(AdminSharedError::ValidationFailed(format!(
                    "Service cannot have an allowed node that is not in members: {}",
                    node
                )));
            }
        }
        let service_id = service.get_service_id().to_string();
        if service_id.is_empty() {
            return Err(AdminSharedError::ValidationFailed(
                "Service id cannot be empty".to_string(),
            ));
        } else if services.contains(&service_id) {
            return Err(AdminSharedError::ValidationFailed(
                "Every service must be unique in the circuit.".to_string(),
            ));
        } else {
            services.push(service_id)
        }
    }
    // NOTE: the old trailing re-check of circuit_management_type was
    // removed — it was unreachable, since the identical condition already
    // returns an error near the top of this function.
    Ok(())
}
/// Validates a vote against an existing circuit proposal.
///
/// Checks, in order: the signer's key is registered and belongs to the
/// node named in the payload header; the vote is not from the original
/// requester (whose proposal is its implicit vote); the node has not
/// already voted; the signer holds the voter role; and the vote's circuit
/// hash matches the proposal's recorded hash.
fn validate_circuit_vote(
    &self,
    proposal_vote: &CircuitProposalVote,
    signer_public_key: &[u8],
    circuit_proposal: &CircuitProposal,
    node_id: &str,
) -> Result<(), AdminSharedError> {
    let circuit_hash = proposal_vote.get_circuit_hash();
    let key_info = self
        .key_registry
        .get_key(signer_public_key)
        .map_err(|err| AdminSharedError::ValidationFailed(err.to_string()))?
        .ok_or_else(|| {
            AdminSharedError::ValidationFailed(format!(
                "{} is not registered for a node",
                to_hex(signer_public_key)
            ))
        })?;
    let signer_node = key_info.associated_node_id().to_string();
    if signer_node != node_id {
        return Err(AdminSharedError::ValidationFailed(format!(
            "Payload requester node id does not match the node the key is registered to: {}",
            to_hex(circuit_proposal.get_requester())
        )));
    }
    // The requester may not vote on its own proposal.
    if circuit_proposal.get_requester_node_id() == signer_node {
        return Err(AdminSharedError::ValidationFailed(format!(
            "Received vote from requester node: {}",
            to_hex(circuit_proposal.get_requester())
        )));
    }
    let voted_nodes: Vec<String> = circuit_proposal
        .get_votes()
        .iter()
        .map(|vote| vote.get_voter_node_id().to_string())
        .collect();
    // One vote per node.
    if voted_nodes.iter().any(|node| *node == signer_node) {
        return Err(AdminSharedError::ValidationFailed(format!(
            "Received duplicate vote from {} for {}",
            signer_node, proposal_vote.circuit_id
        )));
    }
    self.key_permission_manager
        .is_permitted(signer_public_key, VOTER_ROLE)
        .map_err(|_| {
            AdminSharedError::ValidationFailed(format!(
                "{} is not permitted to vote for node {}",
                to_hex(signer_public_key),
                signer_node
            ))
        })?;
    // validate hash of circuit
    if circuit_proposal.get_circuit_hash() != circuit_hash {
        return Err(AdminSharedError::ValidationFailed(format!(
            "Hash of circuit does not match circuit proposal: {}",
            proposal_vote.circuit_id
        )));
    }
    Ok(())
}
/// Performs structural validation of a `CircuitManagementPayload` and its
/// parsed header: the signature, header bytes, requester key, and
/// requester node id must all be present.
fn validate_circuit_management_payload(
    &self,
    payload: &CircuitManagementPayload,
    header: &CircuitManagementPayload_Header,
) -> Result<(), AdminSharedError> {
    // Shared error constructor to keep the checks compact.
    let failed = |msg: &str| AdminSharedError::ValidationFailed(msg.to_string());
    if payload.get_signature().is_empty() {
        return Err(failed("CircuitManagementPayload signature must be set"));
    }
    if payload.get_header().is_empty() {
        return Err(failed("CircuitManagementPayload header must be set"));
    }
    if header.get_requester().is_empty() {
        return Err(failed("CircuitManagementPayload must have a requester"));
    }
    if header.get_requester_node_id().is_empty() {
        return Err(failed(
            "CircuitManagementPayload must have a requester node id",
        ));
    }
    Ok(())
}
/// Tallies votes on a proposal.
///
/// Returns `Rejected` as soon as any REJECT vote is found, `Accepted` when
/// every member other than the requester has voted (all remaining votes
/// are necessarily accepts at that point), and `Pending` otherwise.
fn check_approved(
    &self,
    proposal: &CircuitProposal,
) -> Result<CircuitProposalStatus, AdminSharedError> {
    let mut received_votes = HashSet::new();
    for vote in proposal.get_votes() {
        if vote.get_vote() == CircuitProposalVote_Vote::REJECT {
            return Ok(CircuitProposalStatus::Rejected);
        }
        received_votes.insert(vote.get_voter_node_id().to_string());
    }
    // Iterate the member list directly instead of copying it into an
    // intermediate Vec first (the old `.to_vec().iter()` allocated a
    // throwaway Vec of cloned members).
    let mut required_votes = proposal
        .get_circuit_proposal()
        .get_members()
        .iter()
        .map(|member| member.get_node_id().to_string())
        .collect::<HashSet<String>>();
    // The requester's proposal counts as its implicit accept.
    required_votes.remove(proposal.get_requester_node_id());
    if required_votes == received_votes {
        Ok(CircuitProposalStatus::Accepted)
    } else {
        Ok(CircuitProposalStatus::Pending)
    }
}
/// Initialize all services that this node should run on the created circuit using the service
/// orchestrator. This may not include all services if they are not supported locally. It is
/// expected that some services will be started externally.
pub fn initialize_services(&mut self, circuit: &Circuit) -> Result<(), AdminSharedError> {
    // Get all services this node is allowed to run — i.e. this node is in
    // the service's allowed_nodes AND the orchestrator has a factory for
    // the service's type.
    let services = circuit
        .get_roster()
        .iter()
        .filter(|service| {
            service.allowed_nodes.contains(&self.node_id)
                && self
                    .orchestrator
                    .supported_service_types()
                    .contains(&service.get_service_type().to_string())
        })
        .collect::<Vec<_>>();
    // Start all services the orchestrator has a factory for
    for service in services {
        let service_definition = ServiceDefinition {
            circuit: circuit.circuit_id.clone(),
            service_id: service.service_id.clone(),
            service_type: service.service_type.clone(),
        };
        // Service arguments are passed through as (key, value) pairs.
        let service_arguments = service
            .arguments
            .iter()
            .map(|arg| (arg.key.clone(), arg.value.clone()))
            .collect();
        self.orchestrator
            .initialize_service(service_definition.clone(), service_arguments)?;
        // Track it so stop_services can shut it down later.
        self.running_services.insert(service_definition);
    }
    Ok(())
}
/// Stops all running services
///
/// Every locally started service is asked to shut down; shutdown errors
/// are collected rather than failing fast, and the running set is cleared
/// regardless of failures.
pub fn stop_services(&mut self) -> Result<(), AdminSharedError> {
    let shutdown_errors = self
        .running_services
        .iter()
        .map(|service| {
            debug!(
                "Stopping service {} in circuit {}",
                service.service_type, service.circuit
            );
            // `service` is already a `&ServiceDefinition`; the old
            // `&service` added a needless double borrow.
            self.orchestrator.shutdown_service(service)
        })
        .filter_map(Result::err)
        .collect::<Vec<ShutdownServiceError>>();
    self.running_services = HashSet::new();
    if shutdown_errors.is_empty() {
        Ok(())
    } else {
        Err(AdminSharedError::ServiceShutdownFailed(shutdown_errors))
    }
}
/// On restart of a splinter node, all services that this node should run on the existing
/// circuits should be intialized using the service orchestrator. This may not include all
/// services if they are not supported locally. It is expected that some services will be
/// started externally.
pub fn restart_services(&mut self) -> Result<(), AdminSharedError> {
    // Clone the circuit map so the read lock is released before the
    // orchestrator calls below.
    let circuits = self
        .splinter_state
        .read()
        .map_err(|_| AdminSharedError::PoisonedLock("Splinter State Read Lock".into()))?
        .circuits()
        .clone();
    // start all services of the supported types
    for (circuit_name, circuit) in circuits.iter() {
        // Get all services this node is allowed to run and the orchestrator has a factory for
        let services = circuit
            .roster()
            .iter()
            .filter(|service| {
                service.allowed_nodes().contains(&self.node_id)
                    && self
                        .orchestrator
                        .supported_service_types()
                        .contains(&service.service_type().to_string())
            })
            .collect::<Vec<_>>();
        // Start all services
        for service in services {
            let service_definition = ServiceDefinition {
                circuit: circuit_name.into(),
                service_id: service.service_id().into(),
                service_type: service.service_type().into(),
            };
            let service_arguments = service
                .arguments()
                .iter()
                .map(|(key, value)| (key.clone(), value.clone()))
                .collect();
            self.orchestrator
                .initialize_service(service_definition.clone(), service_arguments)?;
            // Track it so stop_services can shut it down later.
            self.running_services.insert(service_definition);
        }
    }
    Ok(())
}
/// Converts an accepted circuit proto into state types and writes the
/// circuit, its member nodes, and its remote services into splinter state.
///
/// Returns `AdminSharedError::CommitError` if any proto enum is unset
/// (which validation should have prevented), if the state builder or any
/// state write fails, or if a service's allowed node cannot be found.
fn update_splinter_state(&self, circuit: &Circuit) -> Result<(), AdminSharedError> {
    let members: Vec<StateNode> = circuit
        .get_members()
        .iter()
        .map(|node| {
            StateNode::new(
                node.get_node_id().to_string(),
                vec![node.get_endpoint().to_string()],
            )
        })
        .collect();
    // NOTE: `roster` is a lazy iterator; it is cloned once for the circuit
    // builder below and consumed again in the service-directory loop.
    let roster = circuit.get_roster().iter().map(|service| {
        StateServiceDefinition::builder(
            service.get_service_id().to_string(),
            service.get_service_type().to_string(),
        )
        .with_allowed_nodes(service.get_allowed_nodes().to_vec())
        .with_arguments(
            service
                .get_arguments()
                .iter()
                .map(|argument| {
                    (
                        argument.get_key().to_string(),
                        argument.get_value().to_string(),
                    )
                })
                .collect::<BTreeMap<String, String>>(),
        )
        .build()
    });
    // Map each proto enum to its state counterpart; validation guarantees
    // the UNSET variants are unreachable, so they are commit errors here.
    let auth = match circuit.get_authorization_type() {
        Circuit_AuthorizationType::TRUST_AUTHORIZATION => AuthorizationType::Trust,
        // This should never happen
        Circuit_AuthorizationType::UNSET_AUTHORIZATION_TYPE => {
            return Err(AdminSharedError::CommitError(
                "Missing authorization type on circuit commit".to_string(),
            ))
        }
    };
    let persistence = match circuit.get_persistence() {
        Circuit_PersistenceType::ANY_PERSISTENCE => PersistenceType::Any,
        // This should never happen
        Circuit_PersistenceType::UNSET_PERSISTENCE_TYPE => {
            return Err(AdminSharedError::CommitError(
                "Missing persistence type on circuit commit".to_string(),
            ))
        }
    };
    let durability = match circuit.get_durability() {
        Circuit_DurabilityType::NO_DURABILITY => DurabilityType::NoDurability,
        // This should never happen
        Circuit_DurabilityType::UNSET_DURABILITY_TYPE => {
            return Err(AdminSharedError::CommitError(
                "Missing durabilty type on circuit commit".to_string(),
            ))
        }
    };
    let routes = match circuit.get_routes() {
        Circuit_RouteType::ANY_ROUTE => RouteType::Any,
        // This should never happen
        Circuit_RouteType::UNSET_ROUTE_TYPE => {
            return Err(AdminSharedError::CommitError(
                "Missing route type on circuit commit".to_string(),
            ))
        }
    };
    let new_circuit = StateCircuit::builder()
        .with_id(circuit.get_circuit_id().to_string())
        .with_members(
            members
                .iter()
                .map(|node| node.id().to_string())
                .collect::<Vec<String>>(),
        )
        .with_roster(roster.clone())
        .with_auth(auth)
        .with_persistence(persistence)
        .with_durability(durability)
        .with_routes(routes)
        .with_circuit_management_type(circuit.get_circuit_management_type().to_string())
        .build()
        .map_err(|err| {
            AdminSharedError::CommitError(format!("Unable build new circuit: {}", err))
        })?;
    let mut splinter_state = self.splinter_state.write().map_err(|err| {
        AdminSharedError::CommitError(format!("Unable to unlock splinter state: {}", err))
    })?;
    for member in members {
        splinter_state
            .add_node(member.id().to_string(), member)
            .map_err(|err| {
                AdminSharedError::CommitError(format!(
                    "Unable to add node to splinter state: {}",
                    err
                ))
            })?;
    }
    splinter_state
        .add_circuit(new_circuit.id().to_string(), new_circuit)
        .map_err(|err| {
            AdminSharedError::CommitError(format!(
                "Unable to add circuit to splinter state: {}",
                err
            ))
        })?;
    for service in roster {
        // Services allowed on this node are skipped — presumably they are
        // reached via the local orchestrator rather than the directory;
        // TODO(review) confirm against the routing code.
        if service.allowed_nodes().contains(&self.node_id) {
            continue;
        }
        let unique_id = ServiceId::new(
            circuit.circuit_id.to_string(),
            service.service_id().to_string(),
        );
        // Safe to index: validation rejects services with an empty
        // allowed-nodes list.
        let allowed_node = &service.allowed_nodes()[0];
        if let Some(member) = splinter_state.node(&allowed_node) {
            let service = Service::new(service.service_id().to_string(), None, member.clone());
            splinter_state.add_service(unique_id, service)
        } else {
            return Err(AdminSharedError::CommitError(format!(
                "Unable to find allowed node {} when adding service {} to directory",
                allowed_node,
                service.service_id()
            )));
        }
    }
    Ok(())
}
/// Populates the service directory from every circuit already in splinter
/// state, registering each service that runs on a remote node (services
/// allowed on this node are skipped).
///
/// Returns `AdminSharedError::CommitError` if the state lock is poisoned
/// or a service's allowed node is not present in state.
pub fn add_services_to_directory(&self) -> Result<(), AdminSharedError> {
    let mut splinter_state = self.splinter_state.write().map_err(|err| {
        AdminSharedError::CommitError(format!("Unable to unlock splinter state: {}", err))
    })?;
    for (id, circuit) in splinter_state.circuits().clone() {
        for service in circuit.roster() {
            // Local services are not added to the directory.
            if service.allowed_nodes().contains(&self.node_id) {
                continue;
            }
            let unique_id = ServiceId::new(id.to_string(), service.service_id().to_string());
            // Safe to index: validation rejects services with an empty
            // allowed-nodes list.
            let allowed_node = &service.allowed_nodes()[0];
            if let Some(member) = splinter_state.node(&allowed_node) {
                // rebuild Node with id
                let node =
                    StateNode::new(allowed_node.to_string(), member.endpoints().to_vec());
                let service = Service::new(service.service_id().to_string(), None, node);
                splinter_state.add_service(unique_id, service)
            } else {
                return Err(AdminSharedError::CommitError(format!(
                    "Unable to find allowed node {} when adding service {} to directory",
                    allowed_node,
                    service.service_id()
                )));
            }
        }
    }
    Ok(())
}
/// Verifies the payload's signature over its header bytes using the
/// requester public key carried in the parsed header.
fn verify_signature(&self, payload: &CircuitManagementPayload) -> Result<bool, ServiceError> {
    let header =
        protobuf::parse_from_bytes::<CircuitManagementPayload_Header>(payload.get_header())?;
    let signature = payload.get_signature();
    let public_key = header.get_requester();
    // The getters already yield byte slices; the old `&...` arguments were
    // needless double borrows (`&&[u8]`).
    self.signature_verifier
        .verify(payload.get_header(), signature, public_key)
        .map_err(|err| ServiceError::UnableToHandleMessage(Box::new(err)))
}
}
#[cfg(test)]
mod tests {
use super::*;
use protobuf::{Message, RepeatedField};
use crate::circuit::directory::CircuitDirectory;
use crate::keys::{
insecure::AllowAllKeyPermissionManager, storage::StorageKeyRegistry, KeyInfo,
};
use crate::mesh::Mesh;
use crate::network::{
auth::{AuthorizationCallback, AuthorizationCallbackError},
Network,
};
use crate::protos::admin;
use crate::protos::admin::{SplinterNode, SplinterService};
use crate::protos::authorization::{
AuthorizationMessage, AuthorizationMessageType, AuthorizedMessage,
};
use crate::protos::network::{NetworkMessage, NetworkMessageType};
use crate::signing::{
hash::{HashSigner, HashVerifier},
Signer,
};
use crate::storage::get_storage;
use crate::transport::{
ConnectError, Connection, DisconnectError, RecvError, SendError, Transport,
};
/// Test that the CircuitManagementPayload is moved to the pending payloads when the peers are
/// fully authorized.
#[test]
fn test_auth_change() {
let mesh = Mesh::new(4, 16);
let network = Network::new(mesh.clone(), 0).unwrap();
let mut transport = MockConnectingTransport::expect_connections(vec![
Ok(Box::new(MockConnection::new())),
Ok(Box::new(MockConnection::new())),
Ok(Box::new(MockConnection::new())),
]);
let orchestrator_connection = transport
.connect("inproc://admin-service")
.expect("failed to create connection");
let orchestrator = ServiceOrchestrator::new(vec![], orchestrator_connection, 1, 1, 1)
.expect("failed to create orchestrator");
let peer_connector = PeerConnector::new(network.clone(), Box::new(transport));
let state = setup_splinter_state();
let key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let mut shared = AdminServiceShared::new(
"my_peer_id".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = admin::Circuit::new();
circuit.set_circuit_id("test_propose_circuit".into());
circuit.set_authorization_type(admin::Circuit_AuthorizationType::TRUST_AUTHORIZATION);
circuit.set_persistence(admin::Circuit_PersistenceType::ANY_PERSISTENCE);
circuit.set_routes(admin::Circuit_RouteType::ANY_ROUTE);
circuit.set_durability(admin::Circuit_DurabilityType::NO_DURABILITY);
circuit.set_circuit_management_type("test app auth handler".into());
circuit.set_members(protobuf::RepeatedField::from_vec(vec![
splinter_node("test-node", "tcp://someplace:8000"),
splinter_node("other-node", "tcp://otherplace:8000"),
]));
circuit.set_roster(protobuf::RepeatedField::from_vec(vec![
splinter_service("service-a", "sabre"),
splinter_service("service-b", "sabre"),
]));
let mut request = admin::CircuitCreateRequest::new();
request.set_circuit(circuit);
let mut header = admin::CircuitManagementPayload_Header::new();
header.set_action(admin::CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST);
let mut payload = admin::CircuitManagementPayload::new();
payload.set_signature(Vec::new());
payload.set_header(protobuf::Message::write_to_bytes(&header).unwrap());
payload.set_circuit_create_request(request);
shared
.propose_circuit(payload)
.expect("Proposal not accepted");
// None of the proposed members are peered
assert_eq!(0, shared.pending_circuit_payloads.len());
shared.on_authorization_change("test-node", PeerAuthorizationState::Authorized);
// One node is still unpeered
assert_eq!(0, shared.pending_circuit_payloads.len());
shared.on_authorization_change("other-node", PeerAuthorizationState::Authorized);
// We're fully peered, so the pending payload is now available
assert_eq!(1, shared.pending_circuit_payloads.len());
}
/// Test that the CircuitManagementPayload message is dropped, if a node fails authorization.
#[test]
fn test_unauth_change() {
let mesh = Mesh::new(4, 16);
let network = Network::new(mesh.clone(), 0).unwrap();
let mut transport = MockConnectingTransport::expect_connections(vec![
Ok(Box::new(MockConnection::new())),
Ok(Box::new(MockConnection::new())),
Ok(Box::new(MockConnection::new())),
]);
let orchestrator_connection = transport
.connect("inproc://admin-service")
.expect("failed to create connection");
let orchestrator = ServiceOrchestrator::new(vec![], orchestrator_connection, 1, 1, 1)
.expect("failed to create orchestrator");
let peer_connector = PeerConnector::new(network.clone(), Box::new(transport));
let state = setup_splinter_state();
let key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let mut shared = AdminServiceShared::new(
"my_peer_id".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = admin::Circuit::new();
circuit.set_circuit_id("test_propose_circuit".into());
circuit.set_authorization_type(admin::Circuit_AuthorizationType::TRUST_AUTHORIZATION);
circuit.set_persistence(admin::Circuit_PersistenceType::ANY_PERSISTENCE);
circuit.set_routes(admin::Circuit_RouteType::ANY_ROUTE);
circuit.set_circuit_management_type("test app auth handler".into());
circuit.set_members(protobuf::RepeatedField::from_vec(vec![
splinter_node("test-node", "tcp://someplace:8000"),
splinter_node("other-node", "tcp://otherplace:8000"),
]));
circuit.set_roster(protobuf::RepeatedField::from_vec(vec![
splinter_service("service-a", "sabre"),
splinter_service("service-b", "sabre"),
]));
let mut request = admin::CircuitCreateRequest::new();
request.set_circuit(circuit);
let mut header = admin::CircuitManagementPayload_Header::new();
header.set_action(admin::CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST);
let mut payload = admin::CircuitManagementPayload::new();
payload.set_signature(Vec::new());
payload.set_header(protobuf::Message::write_to_bytes(&header).unwrap());
payload.set_circuit_create_request(request);
shared
.propose_circuit(payload)
.expect("Proposal not accepted");
// None of the proposed members are peered
assert_eq!(1, shared.unpeered_payloads.len());
assert_eq!(0, shared.pending_circuit_payloads.len());
shared.on_authorization_change("test-node", PeerAuthorizationState::Unauthorized);
// The message should be dropped
assert_eq!(0, shared.pending_circuit_payloads.len());
assert_eq!(0, shared.unpeered_payloads.len());
}
#[test]
// test that a valid circuit is validated correctly
fn test_validate_circuit_valid() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let circuit = setup_test_circuit();
if let Err(err) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a")
{
panic!("Should have been valid: {}", err);
}
}
#[test]
// test that if a circuit is proposed by a signer key that is not registered the proposal is
// invalid
fn test_validate_circuit_signer_key_not_registered() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
let key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let circuit = setup_test_circuit();
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid due to signer not being registered to a node");
}
}
#[test]
// test that if a circuit has a service in its roster with an allowed node that is not in
// members an error is returned
fn test_validate_circuit_bad_node() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
let mut service_bad = SplinterService::new();
service_bad.set_service_id("service_b".to_string());
service_bad.set_service_type("type_a".to_string());
service_bad.set_allowed_nodes(RepeatedField::from_vec(vec!["node_bad".to_string()]));
circuit.set_roster(RepeatedField::from_vec(vec![service_bad]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid due to service having an allowed node not in members");
}
}
#[test]
// test that if a circuit has a service in its roster with too many allowed nodes
fn test_validate_circuit_too_many_allowed_nodes() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
let mut service_bad = SplinterService::new();
service_bad.set_service_id("service_b".to_string());
service_bad.set_service_type("type_a".to_string());
service_bad.set_allowed_nodes(RepeatedField::from_vec(vec![
"node_b".to_string(),
"extra".to_string(),
]));
circuit.set_roster(RepeatedField::from_vec(vec![service_bad]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid due to service having too many allowed nodes");
}
}
#[test]
// test that if a circuit has a service with "" for a service id an error is returned
fn test_validate_circuit_empty_service_id() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
let mut service_ = SplinterService::new();
service_.set_service_id("".to_string());
service_.set_service_type("type_a".to_string());
service_.set_allowed_nodes(RepeatedField::from_vec(vec!["node_a".to_string()]));
circuit.set_roster(RepeatedField::from_vec(vec![service_]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid due to service's id being empty");
}
}
#[test]
// test that if a circuit has a service with duplicate service ids an error is returned
fn test_validate_circuit_duplicate_service_id() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
let mut service_a = SplinterService::new();
service_a.set_service_id("service_a".to_string());
service_a.set_service_type("type_a".to_string());
service_a.set_allowed_nodes(RepeatedField::from_vec(vec!["node_a".to_string()]));
let mut service_a2 = SplinterService::new();
service_a2.set_service_id("service_a".to_string());
service_a2.set_service_type("type_a".to_string());
service_a2.set_allowed_nodes(RepeatedField::from_vec(vec!["node_b".to_string()]));
circuit.set_roster(RepeatedField::from_vec(vec![service_a, service_a2]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid due to service's id being a duplicate");
}
}
#[test]
// test that if a circuit does not have any services in its roster an error is returned
fn test_validate_circuit_empty_roster() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
circuit.set_roster(RepeatedField::from_vec(vec![]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid due to empty roster");
}
}
#[test]
// test that if a circuit does not have any nodes in its members an error is returned
fn test_validate_circuit_empty_members() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
circuit.set_members(RepeatedField::from_vec(vec![]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid empty members");
}
}
#[test]
// test that if a circuit does not have the local node in the member list an error is
// returned
fn test_validate_circuit_missing_local_node() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
let mut node_b = SplinterNode::new();
node_b.set_node_id("node_b".to_string());
node_b.set_endpoint("test://endpoint_b:0".to_string());
circuit.set_members(RepeatedField::from_vec(vec![node_b]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because node_a is not in members");
}
}
#[test]
// test that if a circuit has a member with node id of "" an error is
// returned
fn test_validate_circuit_empty_node_id() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
let mut node_a = SplinterNode::new();
node_a.set_node_id("node_a".to_string());
node_a.set_endpoint("test://endpoint_a:0".to_string());
let mut node_b = SplinterNode::new();
node_b.set_node_id("node_b".to_string());
node_b.set_endpoint("test://endpoint_b:0".to_string());
let mut node_ = SplinterNode::new();
node_.set_node_id("".to_string());
node_.set_endpoint("test://endpoint_:0".to_string());
circuit.set_members(RepeatedField::from_vec(vec![node_a, node_b, node_]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because node_ is has an empty node id");
}
}
#[test]
// test that if a circuit has duplicate members an error is returned
fn test_validate_circuit_duplicate_members() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
let mut node_a = SplinterNode::new();
node_a.set_node_id("node_a".to_string());
node_a.set_endpoint("test://endpoint_a:0".to_string());
let mut node_b = SplinterNode::new();
node_b.set_node_id("node_b".to_string());
node_b.set_endpoint("test://endpoint_b:0".to_string());
let mut node_b2 = SplinterNode::new();
node_b2.set_node_id("node_b".to_string());
node_b2.set_endpoint("test://endpoint_b2:0".to_string());
circuit.set_members(RepeatedField::from_vec(vec![node_a, node_b, node_b2]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because there are duplicate members");
}
}
#[test]
// test that if a circuit has a member with an empty endpoint an error is returned
fn test_validate_circuit_empty_endpoint() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
let mut node_a = SplinterNode::new();
node_a.set_node_id("node_a".to_string());
node_a.set_endpoint("test://endpoint_a:0".to_string());
let mut node_b = SplinterNode::new();
node_b.set_node_id("node_b".to_string());
node_b.set_endpoint("".to_string());
circuit.set_members(RepeatedField::from_vec(vec![node_a, node_b]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because a member has an empty endpoint");
}
}
#[test]
// test that if a circuit has a member with a duplicate endpoint an error is returned
fn test_validate_circuit_duplicate_endpoint() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
let mut node_a = SplinterNode::new();
node_a.set_node_id("node_a".to_string());
node_a.set_endpoint("test://endpoint_a:0".to_string());
let mut node_b = SplinterNode::new();
node_b.set_node_id("node_b".to_string());
node_b.set_endpoint("test://endpoint_a:0".to_string());
circuit.set_members(RepeatedField::from_vec(vec![node_a, node_b]));
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because a member has a duplicate endpoint");
}
}
#[test]
// test that if a circuit does not have authorization set an error is returned
fn test_validate_circuit_no_authorization() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
circuit.set_authorization_type(Circuit_AuthorizationType::UNSET_AUTHORIZATION_TYPE);
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because authorizaiton type is unset");
}
}
#[test]
// test that if a circuit does not have persistence set an error is returned
fn test_validate_circuit_no_persitance() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
circuit.set_persistence(Circuit_PersistenceType::UNSET_PERSISTENCE_TYPE);
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because persistence type is unset");
}
}
#[test]
// test that if a circuit does not have durability set an error is returned
fn test_validate_circuit_unset_durability() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
circuit.set_durability(Circuit_DurabilityType::UNSET_DURABILITY_TYPE);
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because durabilty type is unset");
}
}
#[test]
// test that if a circuit does not have route type set an error is returned
fn test_validate_circuit_no_routes() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
circuit.set_routes(Circuit_RouteType::UNSET_ROUTE_TYPE);
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because route type is unset");
}
}
#[test]
// test that if a circuit does not have circuit_management_type set an error is returned
fn test_validate_circuit_no_management_type() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let mut circuit = setup_test_circuit();
circuit.set_circuit_management_type("".to_string());
if let Ok(_) = admin_shared.validate_create_circuit(&circuit, b"test_signer_a", "node_a") {
panic!("Should have been invalid because route type is unset");
}
}
#[test]
// test that a valid circuit proposal vote comes back as valid
fn test_validate_proposal_vote_valid() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let circuit = setup_test_circuit();
let vote = setup_test_vote(&circuit);
let proposal = setup_test_proposal(&circuit);
if let Err(err) =
admin_shared.validate_circuit_vote(&vote, b"test_signer_a", &proposal, "node_a")
{
panic!("Should have been valid: {}", err);
}
}
#[test]
// test that if the signer of the vote is not registered to a node the vote is invalid
fn test_validate_proposal_vote_node_not_registered() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let circuit = setup_test_circuit();
let vote = setup_test_vote(&circuit);
let proposal = setup_test_proposal(&circuit);
if let Ok(_) =
admin_shared.validate_circuit_vote(&vote, b"test_signer_a", &proposal, "node_a")
{
panic!("Should have been invalid because signer is not registered for a node");
}
}
#[test]
// test if the voter is registered to the original requester node the vote is invalid
fn test_validate_proposal_vote_requester() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_b".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let circuit = setup_test_circuit();
let vote = setup_test_vote(&circuit);
let proposal = setup_test_proposal(&circuit);
if let Ok(_) =
admin_shared.validate_circuit_vote(&vote, b"test_signer_a", &proposal, "node_a")
{
panic!("Should have been invalid because signer registered for the requester node");
}
}
#[test]
// test if a voter has already voted on a proposal the new vote is invalid
fn test_validate_proposal_vote_duplicate_vote() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let circuit = setup_test_circuit();
let vote = setup_test_vote(&circuit);
let mut proposal = setup_test_proposal(&circuit);
let mut vote_record = CircuitProposal_VoteRecord::new();
vote_record.set_vote(CircuitProposalVote_Vote::ACCEPT);
vote_record.set_public_key(b"test_signer_a".to_vec());
vote_record.set_voter_node_id("node_a".to_string());
proposal.set_votes(RepeatedField::from_vec(vec![vote_record]));
if let Ok(_) =
admin_shared.validate_circuit_vote(&vote, b"test_signer_a", &proposal, "node_a")
{
panic!("Should have been invalid because node as already submited a vote");
}
}
#[test]
// test that if the circuit hash in the circuit proposal does not match the circuit hash on
// the vote, the vote is invalid
fn test_validate_proposal_vote_circuit_hash_mismatch() {
let state = setup_splinter_state();
let peer_connector = setup_peer_connector();
let orchestrator = setup_orchestrator();
// set up key registry
let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
key_registry.save_key(key_info).unwrap();
let admin_shared = AdminServiceShared::new(
"node_a".into(),
orchestrator,
peer_connector,
Box::new(MockAuthInquisitor),
state,
Box::new(HashVerifier),
Box::new(key_registry),
Box::new(AllowAllKeyPermissionManager),
"memory",
)
.unwrap();
let circuit = setup_test_circuit();
let vote = setup_test_vote(&circuit);
let mut proposal = setup_test_proposal(&circuit);
proposal.set_circuit_hash("bad_hash".to_string());
if let Ok(_) =
admin_shared.validate_circuit_vote(&vote, b"test_signer_a", &proposal, "node_a")
{
panic!("Should have been invalid because the circuit hash does not match");
}
}
#[test]
// test that the validate_circuit_management_payload method returns an error in case the
// signature is empty.
fn test_validate_circuit_management_payload_signature() {
    let state = setup_splinter_state();
    let peer_connector = setup_peer_connector();
    let orchestrator = setup_orchestrator();
    // set up key registry with a key authorized for node_a
    let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
    let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
    key_registry.save_key(key_info).unwrap();
    let shared = AdminServiceShared::new(
        "node_a".into(),
        orchestrator,
        peer_connector,
        Box::new(MockAuthInquisitor),
        state,
        Box::new(HashVerifier),
        Box::new(key_registry),
        Box::new(AllowAllKeyPermissionManager),
        "memory",
    )
    .unwrap();
    let circuit = setup_test_circuit();
    let mut request = admin::CircuitCreateRequest::new();
    request.set_circuit(circuit);
    let mut header = admin::CircuitManagementPayload_Header::new();
    header.set_action(admin::CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST);
    header.set_requester(b"test_signer_b".to_vec());
    header.set_requester_node_id("node_b".to_string());
    let mut payload = admin::CircuitManagementPayload::new();
    // Deliberately attach an empty signature to trigger the validation error.
    payload.set_signature(Vec::new());
    payload.set_header(protobuf::Message::write_to_bytes(&header).unwrap());
    payload.set_circuit_create_request(request);
    // Asserting the payload will be deemed invalid as the signature is an empty vec.
    if let Ok(_) = shared.validate_circuit_management_payload(&payload, &header) {
        panic!("Should have been invalid due to empty signature");
    }
    // Attach a real signature over the serialized header and re-validate.
    payload.set_signature(HashSigner.sign(&payload.header).unwrap());
    // Asserting the payload passed through validation.
    if let Err(_) = shared.validate_circuit_management_payload(&payload, &header) {
        panic!("Should have been valid");
    }
}
#[test]
// test that the validate_circuit_management_payload method returns an error in case the header is empty.
fn test_validate_circuit_management_payload_header() {
    let state = setup_splinter_state();
    let peer_connector = setup_peer_connector();
    let orchestrator = setup_orchestrator();
    // set up key registry with a key authorized for node_a
    let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
    let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
    key_registry.save_key(key_info).unwrap();
    let shared = AdminServiceShared::new(
        "node_a".into(),
        orchestrator,
        peer_connector,
        Box::new(MockAuthInquisitor),
        state,
        Box::new(HashVerifier),
        Box::new(key_registry),
        Box::new(AllowAllKeyPermissionManager),
        "memory",
    )
    .unwrap();
    let circuit = setup_test_circuit();
    let mut request = admin::CircuitCreateRequest::new();
    request.set_circuit(circuit);
    let mut header = admin::CircuitManagementPayload_Header::new();
    header.set_action(admin::CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST);
    header.set_requester(b"test_signer_b".to_vec());
    header.set_requester_node_id("node_b".to_string());
    let mut payload = admin::CircuitManagementPayload::new();
    // Sign the (still empty) header bytes; the header field itself is left unset.
    payload.set_signature(HashSigner.sign(&payload.header).unwrap());
    payload.set_circuit_create_request(request);
    // Asserting the payload will be deemed invalid as the header is empty.
    match shared.validate_circuit_management_payload(&payload, &header) {
        Err(err) => assert!(err
            .to_string()
            .contains("CircuitManagementPayload header must be set")),
        _ => panic!("Should have been invalid because empty requester field"),
    }
    // Now set the serialized header and re-validate.
    payload.set_header(protobuf::Message::write_to_bytes(&header).unwrap());
    // Asserting the payload passed through validation, and failed at a further step.
    if let Err(_) = shared.validate_circuit_management_payload(&payload, &header) {
        panic!("Should have been valid");
    }
}
#[test]
// test that the validate_circuit_management_payload method returns an error in case the header
// requester field is empty.
fn test_validate_circuit_management_header_requester() {
    let state = setup_splinter_state();
    let peer_connector = setup_peer_connector();
    let orchestrator = setup_orchestrator();
    // set up key registry with a key authorized for node_a
    let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
    let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
    key_registry.save_key(key_info).unwrap();
    let shared = AdminServiceShared::new(
        "node_a".into(),
        orchestrator,
        peer_connector,
        Box::new(MockAuthInquisitor),
        state,
        Box::new(HashVerifier),
        Box::new(key_registry),
        Box::new(AllowAllKeyPermissionManager),
        "memory",
    )
    .unwrap();
    let circuit = setup_test_circuit();
    let mut request = admin::CircuitCreateRequest::new();
    request.set_circuit(circuit);
    let mut header = admin::CircuitManagementPayload_Header::new();
    header.set_action(admin::CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST);
    // Note: the requester field is intentionally left unset here.
    header.set_requester_node_id("node_b".to_string());
    let mut payload = admin::CircuitManagementPayload::new();
    payload.set_signature(HashSigner.sign(&payload.header).unwrap());
    payload.set_circuit_create_request(request);
    payload.set_header(protobuf::Message::write_to_bytes(&header).unwrap());
    // Asserting the payload will be deemed invalid as the header has no requester.
    match shared.validate_circuit_management_payload(&payload, &header) {
        Err(err) => assert!(err
            .to_string()
            .contains("CircuitManagementPayload must have a requester")),
        _ => panic!("Should have been invalid because empty requester field"),
    }
    // Fill in the requester, reserialize the header, and re-validate.
    header.set_requester(b"test_signer_b".to_vec());
    payload.set_header(protobuf::Message::write_to_bytes(&header).unwrap());
    // Asserting the payload passed through validation, and failed at a further step.
    if let Err(_) = shared.validate_circuit_management_payload(&payload, &header) {
        panic!("Should have been valid");
    }
}
#[test]
// test that the CircuitManagementPayload returns an error in case the header requester_node_id
// field is empty.
fn test_validate_circuit_management_header_requester_node_id() {
    let state = setup_splinter_state();
    let peer_connector = setup_peer_connector();
    let orchestrator = setup_orchestrator();
    // set up key registry with a key authorized for node_a
    let mut key_registry = StorageKeyRegistry::new("memory".to_string()).unwrap();
    let key_info = KeyInfo::builder(b"test_signer_a".to_vec(), "node_a".to_string()).build();
    key_registry.save_key(key_info).unwrap();
    let shared = AdminServiceShared::new(
        "node_a".into(),
        orchestrator,
        peer_connector,
        Box::new(MockAuthInquisitor),
        state,
        Box::new(HashVerifier),
        Box::new(key_registry),
        Box::new(AllowAllKeyPermissionManager),
        "memory",
    )
    .unwrap();
    let circuit = setup_test_circuit();
    let mut request = admin::CircuitCreateRequest::new();
    request.set_circuit(circuit);
    let mut header = admin::CircuitManagementPayload_Header::new();
    header.set_action(admin::CircuitManagementPayload_Action::CIRCUIT_CREATE_REQUEST);
    // Note: the requester_node_id field is intentionally left unset here.
    header.set_requester(b"test_signer_b".to_vec());
    let mut payload = admin::CircuitManagementPayload::new();
    payload.set_signature(HashSigner.sign(&payload.header).unwrap());
    payload.set_circuit_create_request(request);
    payload.set_header(protobuf::Message::write_to_bytes(&header).unwrap());
    // Asserting the payload will be deemed invalid as the header has no requester node id.
    match shared.validate_circuit_management_payload(&payload, &header) {
        Err(err) => assert!(err
            .to_string()
            .contains("CircuitManagementPayload must have a requester node id")),
        _ => panic!("Should have been invalid because empty requester field"),
    }
    // Fill in the node id, reserialize the header, and re-validate.
    header.set_requester_node_id("node_b".to_string());
    payload.set_header(protobuf::Message::write_to_bytes(&header).unwrap());
    // Asserting the payload passed through validation, and failed at a further step.
    if let Err(_) = shared.validate_circuit_management_payload(&payload, &header) {
        panic!("Should have been valid");
    }
}
/// Build the two-node, two-service test circuit ("alpha") used throughout
/// these tests: node_a hosting service_a and node_b hosting service_b, with
/// trust authorization and permissive persistence/durability/routing.
pub fn setup_test_circuit() -> Circuit {
    let mut service_a = SplinterService::new();
    service_a.set_service_id("service_a".to_string());
    service_a.set_service_type("type_a".to_string());
    service_a.set_allowed_nodes(RepeatedField::from_vec(vec!["node_a".to_string()]));
    let mut service_b = SplinterService::new();
    service_b.set_service_id("service_b".to_string());
    service_b.set_service_type("type_a".to_string());
    service_b.set_allowed_nodes(RepeatedField::from_vec(vec!["node_b".to_string()]));
    let mut node_a = SplinterNode::new();
    node_a.set_node_id("node_a".to_string());
    node_a.set_endpoint("test://endpoint_a:0".to_string());
    let mut node_b = SplinterNode::new();
    node_b.set_node_id("node_b".to_string());
    node_b.set_endpoint("test://endpoint_b:0".to_string());
    let mut circuit = Circuit::new();
    circuit.set_circuit_id("alpha".to_string());
    circuit.set_members(RepeatedField::from_vec(vec![node_a, node_b]));
    // Roster is deliberately out of id-order (b before a).
    circuit.set_roster(RepeatedField::from_vec(vec![service_b, service_a]));
    circuit.set_authorization_type(Circuit_AuthorizationType::TRUST_AUTHORIZATION);
    circuit.set_persistence(Circuit_PersistenceType::ANY_PERSISTENCE);
    circuit.set_durability(Circuit_DurabilityType::NO_DURABILITY);
    circuit.set_routes(Circuit_RouteType::ANY_ROUTE);
    circuit.set_circuit_management_type("test_circuit".to_string());
    circuit.set_application_metadata(b"test_data".to_vec());
    circuit
}
/// Build an ACCEPT `CircuitProposalVote` for `circuit`, carrying a hash that
/// matches the circuit so the vote validates against an untampered proposal.
fn setup_test_vote(circuit: &Circuit) -> CircuitProposalVote {
    let hash = sha256(circuit).unwrap();
    let mut vote = CircuitProposalVote::new();
    vote.set_vote(CircuitProposalVote_Vote::ACCEPT);
    vote.set_circuit_id(circuit.get_circuit_id().to_string());
    vote.set_circuit_hash(hash);
    vote
}
/// Build a CREATE `CircuitProposal` for `proposed_circuit`, requested by
/// node_b's test signer, with the hash of the proposed circuit embedded.
fn setup_test_proposal(proposed_circuit: &Circuit) -> CircuitProposal {
    let mut proposal = CircuitProposal::new();
    proposal.set_proposal_type(CircuitProposal_ProposalType::CREATE);
    proposal.set_circuit_id(proposed_circuit.get_circuit_id().into());
    proposal.set_circuit_hash(sha256(proposed_circuit).unwrap());
    proposal.set_circuit_proposal(proposed_circuit.clone());
    proposal.set_requester(b"test_signer_b".to_vec());
    proposal.set_requester_node_id("node_b".to_string());
    proposal
}
/// Create a `SplinterState` backed by in-memory storage, seeded with an
/// empty `CircuitDirectory`, wrapped for shared mutable access.
fn setup_splinter_state() -> Arc<RwLock<SplinterState>> {
    let mut storage = get_storage("memory", CircuitDirectory::new).unwrap();
    let circuit_directory = storage.write().clone();
    // Return the expression directly instead of binding it to a temporary
    // (clippy::let_and_return).
    Arc::new(RwLock::new(SplinterState::new(
        "memory".to_string(),
        circuit_directory,
    )))
}
/// Create a `PeerConnector` over a mock transport with two pre-queued
/// successful connections (one per peer the tests may contact).
fn setup_peer_connector() -> PeerConnector {
    let mesh = Mesh::new(4, 16);
    let network = Network::new(mesh.clone(), 0).unwrap();
    let transport = MockConnectingTransport::expect_connections(vec![
        Ok(Box::new(MockConnection::new())),
        Ok(Box::new(MockConnection::new())),
    ]);
    // Return the expression directly instead of binding it to a temporary
    // (clippy::let_and_return).
    PeerConnector::new(network.clone(), Box::new(transport))
}
/// Create a `ServiceOrchestrator` with no service factories, connected over a
/// single mock in-process connection.
fn setup_orchestrator() -> ServiceOrchestrator {
    let mut transport =
        MockConnectingTransport::expect_connections(vec![Ok(Box::new(MockConnection::new()))]);
    let orchestrator_connection = transport
        .connect("inproc://admin-service")
        .expect("failed to create connection");
    // Empty factory list; incoming/outgoing capacity and channel capacity all 1.
    ServiceOrchestrator::new(vec![], orchestrator_connection, 1, 1, 1)
        .expect("failed to create orchestrator")
}
/// Convenience constructor for an `admin::SplinterNode` protobuf message.
fn splinter_node(node_id: &str, endpoint: &str) -> admin::SplinterNode {
    let mut proto = admin::SplinterNode::new();
    proto.set_node_id(node_id.into());
    proto.set_endpoint(endpoint.into());
    proto
}
/// Convenience constructor for an `admin::SplinterService` protobuf message.
fn splinter_service(service_id: &str, service_type: &str) -> admin::SplinterService {
    let mut proto = admin::SplinterService::new();
    proto.set_service_id(service_id.into());
    proto.set_service_type(service_type.into());
    proto
}
/// Test double for `AuthorizationInquisitor` that treats every peer as
/// unauthorized and does not support callback registration.
struct MockAuthInquisitor;
impl AuthorizationInquisitor for MockAuthInquisitor {
    // Always reports unauthorized, regardless of the peer id.
    fn is_authorized(&self, _: &str) -> bool {
        false
    }
    // Not exercised by these tests; panics if called.
    fn register_callback(
        &self,
        _: Box<dyn AuthorizationCallback>,
    ) -> Result<(), AuthorizationCallbackError> {
        unimplemented!();
    }
}
/// Test transport that serves a pre-arranged queue of connection results.
struct MockConnectingTransport {
    // Results handed out in FIFO order by `connect`.
    connection_results: VecDeque<Result<Box<dyn Connection>, ConnectError>>,
}
impl MockConnectingTransport {
    /// Queue up the results that successive `connect` calls will return.
    fn expect_connections(results: Vec<Result<Box<dyn Connection>, ConnectError>>) -> Self {
        Self {
            connection_results: results.into_iter().collect(),
        }
    }
}
impl Transport for MockConnectingTransport {
    // Claims to accept any endpoint string.
    fn accepts(&self, _: &str) -> bool {
        true
    }
    // Pops the next queued result; panics if the test queued too few.
    fn connect(&mut self, _: &str) -> Result<Box<dyn Connection>, ConnectError> {
        self.connection_results
            .pop_front()
            .expect("No test result added to mock")
    }
    // Listening is not supported by this mock; panics if called.
    fn listen(
        &mut self,
        _: &str,
    ) -> Result<Box<dyn crate::transport::Listener>, crate::transport::ListenError> {
        panic!("MockConnectingTransport.listen unexpectedly called")
    }
}
/// Mock connection whose first `recv` yields a canned authorization-accepted
/// message, and whose subsequent `recv`s yield empty payloads.
struct MockConnection {
    // One-shot authorization response; `take`n on first recv.
    auth_response: Option<Vec<u8>>,
    evented: MockEvented,
}
impl MockConnection {
    fn new() -> Self {
        Self {
            auth_response: Some(authorized_response()),
            evented: MockEvented::new(),
        }
    }
}
impl Connection for MockConnection {
    // Sends are accepted and discarded.
    fn send(&mut self, _message: &[u8]) -> Result<(), SendError> {
        Ok(())
    }
    // First call returns the canned authorization response; later calls
    // return an empty payload. `unwrap_or_default` replaces the former
    // `unwrap_or_else(|| vec![])` (clippy: prefer the Default form).
    fn recv(&mut self) -> Result<Vec<u8>, RecvError> {
        Ok(self.auth_response.take().unwrap_or_default())
    }
    fn remote_endpoint(&self) -> String {
        String::from("MockConnection")
    }
    fn local_endpoint(&self) -> String {
        String::from("MockConnection")
    }
    fn disconnect(&mut self) -> Result<(), DisconnectError> {
        Ok(())
    }
    fn evented(&self) -> &dyn mio::Evented {
        &self.evented
    }
}
/// Minimal `mio::Evented` implementation backed by a user-space registration,
/// so mock connections can participate in a `mio::Poll` loop.
struct MockEvented {
    registration: mio::Registration,
    set_readiness: mio::SetReadiness,
}
impl MockEvented {
    fn new() -> Self {
        let (registration, set_readiness) = mio::Registration::new2();
        Self {
            registration,
            set_readiness,
        }
    }
}
impl mio::Evented for MockEvented {
    fn register(
        &self,
        poll: &mio::Poll,
        token: mio::Token,
        interest: mio::Ready,
        opts: mio::PollOpt,
    ) -> std::io::Result<()> {
        self.registration.register(poll, token, interest, opts)?;
        // Immediately signal readable so pollers wake up right away.
        self.set_readiness.set_readiness(mio::Ready::readable())?;
        Ok(())
    }
    fn reregister(
        &self,
        poll: &mio::Poll,
        token: mio::Token,
        interest: mio::Ready,
        opts: mio::PollOpt,
    ) -> std::io::Result<()> {
        self.registration.reregister(poll, token, interest, opts)
    }
    fn deregister(&self, poll: &mio::Poll) -> std::io::Result<()> {
        poll.deregister(&self.registration)
    }
}
/// Serialize a network message carrying an AUTHORIZE authorization response,
/// i.e. the bytes a peer would send after accepting our authorization.
fn authorized_response() -> Vec<u8> {
    let msg_type = AuthorizationMessageType::AUTHORIZE;
    let auth_msg = AuthorizedMessage::new();
    // Wrap the (empty) authorized message in an authorization envelope...
    let mut auth_msg_env = AuthorizationMessage::new();
    auth_msg_env.set_message_type(msg_type);
    auth_msg_env.set_payload(auth_msg.write_to_bytes().expect("unable to write to bytes"));
    // ...and that envelope in a network message.
    let mut network_msg = NetworkMessage::new();
    network_msg.set_message_type(NetworkMessageType::AUTHORIZATION);
    network_msg.set_payload(
        auth_msg_env
            .write_to_bytes()
            .expect("unable to write to bytes"),
    );
    network_msg
        .write_to_bytes()
        .expect("unable to write to bytes")
}
}
// Copyright 2021 Shift Crypto AG
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::pb::CardanoNetwork;
/// Parameters for Cardano networks.
pub struct Params {
pub network: CardanoNetwork,
pub name: &'static str,
pub unit: &'static str,
/// https://github.com/cardano-foundation/CIPs/blob/6c249ef48f8f5b32efc0ec768fadf4321f3173f2/CIP-0005/CIP-0005.md#miscellaneous
pub bech32_hrp_payment: &'static str,
/// https://github.com/cardano-foundation/CIPs/blob/6c249ef48f8f5b32efc0ec768fadf4321f3173f2/CIP-0019/CIP-0019.md#network-tag
pub network_id: u8,
/// Protocol magic used in Byron addresses for non-mainnet chains.
pub protocol_magic: Option<u32>,
}
// Mainnet parameters; network id 1 per CIP-0019.
const PARAMS_MAINNET: Params = Params {
    network: CardanoNetwork::CardanoMainnet,
    name: "Cardano",
    unit: "ADA",
    bech32_hrp_payment: "addr",
    network_id: 1,
    protocol_magic: None, // it is 764824073, but we don't need the actual value anywhere
};
// Testnet parameters; network id 0 per CIP-0019, with the Byron protocol magic.
const PARAMS_TESTNET: Params = Params {
    network: CardanoNetwork::CardanoTestnet,
    name: "ADA testnet",
    unit: "TADA",
    bech32_hrp_payment: "addr_test",
    network_id: 0,
    protocol_magic: Some(1097911063),
};
/// Look up the static parameter set for the given network.
pub fn get(network: CardanoNetwork) -> &'static Params {
    match network {
        CardanoNetwork::CardanoMainnet => &PARAMS_MAINNET,
        CardanoNetwork::CardanoTestnet => &PARAMS_TESTNET,
    }
}
use crate::msgs::enums::{ProtocolVersion, HandshakeType};
use crate::msgs::enums::{CipherSuite, Compression, ExtensionType, ECPointFormat};
use crate::msgs::enums::{HashAlgorithm, SignatureAlgorithm, ServerNameType};
use crate::msgs::enums::{SignatureScheme, KeyUpdateRequest, NamedGroup};
use crate::msgs::enums::{ClientCertificateType, CertificateStatusType};
use crate::msgs::enums::ECCurveType;
use crate::msgs::enums::PSKKeyExchangeMode;
use crate::msgs::base::{Payload, PayloadU8, PayloadU16, PayloadU24};
use crate::msgs::codec;
use crate::msgs::codec::{Codec, Reader};
use crate::key;
#[cfg(feature = "logging")]
use crate::log::warn;
use std::fmt;
use std::io::Write;
use std::collections;
use std::mem;
use untrusted;
use webpki;
// Declares a type alias for `Vec<$itemtype>` and a `Codec` impl that encodes
// it with a one-byte length prefix.
macro_rules! declare_u8_vec(
    ($name:ident, $itemtype:ty) => {
        pub type $name = Vec<$itemtype>;
        impl Codec for $name {
            fn encode(&self, bytes: &mut Vec<u8>) {
                codec::encode_vec_u8(bytes, self);
            }
            fn read(r: &mut Reader) -> Option<$name> {
                codec::read_vec_u8::<$itemtype>(r)
            }
        }
    }
);
// Same as `declare_u8_vec!`, but with a two-byte length prefix.
macro_rules! declare_u16_vec(
    ($name:ident, $itemtype:ty) => {
        pub type $name = Vec<$itemtype>;
        impl Codec for $name {
            fn encode(&self, bytes: &mut Vec<u8>) {
                codec::encode_vec_u16(bytes, self);
            }
            fn read(r: &mut Reader) -> Option<$name> {
                codec::read_vec_u16::<$itemtype>(r)
            }
        }
    }
);
declare_u16_vec!(VecU16OfPayloadU8, PayloadU8);
declare_u16_vec!(VecU16OfPayloadU16, PayloadU16);
/// The 32-byte random value carried in ClientHello/ServerHello.
#[derive(Debug, PartialEq, Clone)]
pub struct Random([u8; 32]);
// Fixed server random that marks a HelloRetryRequest (see RFC 8446).
static HELLO_RETRY_REQUEST_RANDOM: Random = Random([
    0xcf, 0x21, 0xad, 0x74, 0xe5, 0x9a, 0x61, 0x11,
    0xbe, 0x1d, 0x8c, 0x02, 0x1e, 0x65, 0xb8, 0x91,
    0xc2, 0xa2, 0x11, 0x16, 0x7a, 0xbb, 0x8c, 0x5e,
    0x07, 0x9e, 0x09, 0xe2, 0xc8, 0xa8, 0x33, 0x9c,
]);
// All-zero random value.
static ZERO_RANDOM: Random = Random([0u8; 32]);
impl Codec for Random {
    // Encoded as exactly 32 raw bytes, no length prefix.
    fn encode(&self, bytes: &mut Vec<u8>) {
        bytes.extend_from_slice(&self.0);
    }
    // Fails (None) if fewer than 32 bytes remain in the reader.
    fn read(r: &mut Reader) -> Option<Random> {
        let bytes = r.take(32)?;
        let mut opaque = [0; 32];
        opaque.clone_from_slice(bytes);
        Some(Random(opaque))
    }
}
impl Random {
    /// Build a `Random` from the first 32 bytes of `bytes`.
    /// Panics if `bytes` is shorter than 32 bytes (the `read` unwrap).
    pub fn from_slice(bytes: &[u8]) -> Random {
        let mut rd = Reader::init(bytes);
        Random::read(&mut rd).unwrap()
    }
    /// Write the 32-byte encoding into `bytes`.
    /// Panics if `bytes` is shorter than 32 bytes (the `write_all` unwrap).
    pub fn write_slice(&self, mut bytes: &mut [u8]) {
        let buf = self.get_encoding();
        bytes.write_all(&buf).unwrap();
    }
}
/// A TLS session id: up to 32 bytes of opaque data plus its length.
/// Stored inline in a fixed array; unused tail bytes are zero.
#[derive(Copy, Clone)]
pub struct SessionID {
    len: usize,
    data: [u8; 32],
}
impl fmt::Debug for SessionID {
    // Debug-print only the `len` meaningful bytes, not the zero padding.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut t = f.debug_tuple("SessionID");
        for i in 0..self.len() {
            t.field(&self.data[i]);
        }
        t.finish()
    }
}
impl PartialEq for SessionID {
    // Byte comparison accumulates XOR differences over the whole id with no
    // early exit, so timing does not reveal where the ids first differ.
    // (The length check still returns early on mismatched lengths.)
    fn eq(&self, other: &Self) -> bool {
        if self.len != other.len {
            return false;
        }
        let mut diff = 0u8;
        for i in 0..self.len {
            diff |= self.data[i] ^ other.data[i]
        }
        diff == 0u8
    }
}
impl Codec for SessionID {
    // Wire format: one length byte followed by `len` data bytes.
    fn encode(&self, bytes: &mut Vec<u8>) {
        debug_assert!(self.len <= 32);
        bytes.push(self.len as u8);
        bytes.extend_from_slice(&self.data[..self.len]);
    }
    // Rejects (None) any encoding claiming more than 32 bytes.
    fn read(r: &mut Reader) -> Option<SessionID> {
        let len = u8::read(r)? as usize;
        if len > 32 {
            return None;
        }
        let bytes = r.take(len)?;
        let mut out = [0u8; 32];
        out[..len].clone_from_slice(&bytes[..len]);
        Some(SessionID {
            data: out,
            len,
        })
    }
}
impl SessionID {
    /// Build a `SessionID` from `bytes`; debug-asserts `bytes.len() <= 32`.
    pub fn new(bytes: &[u8]) -> SessionID {
        debug_assert!(bytes.len() <= 32);
        let mut data = [0u8; 32];
        data[..bytes.len()].clone_from_slice(bytes);
        SessionID {
            data,
            len: bytes.len(),
        }
    }
    /// The zero-length session id.
    pub fn empty() -> SessionID {
        SessionID {
            data: [0u8; 32],
            len: 0,
        }
    }
    /// Number of meaningful bytes in the id.
    pub fn len(&self) -> usize {
        self.len
    }
    /// True iff the id has zero length.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
}
/// An extension whose type this implementation does not understand;
/// the body is preserved verbatim as an opaque payload.
#[derive(Clone, Debug)]
pub struct UnknownExtension {
    pub typ: ExtensionType,
    pub payload: Payload,
}
impl UnknownExtension {
    // Only the payload is written here; the caller encodes the type code.
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.payload.encode(bytes);
    }
    // The type code has already been consumed by the caller and is passed in.
    fn read(typ: ExtensionType, r: &mut Reader) -> Option<UnknownExtension> {
        let payload = Payload::read(r)?;
        Some(UnknownExtension {
            typ,
            payload,
        })
    }
}
declare_u8_vec!(ECPointFormatList, ECPointFormat);
/// Source of the EC point formats this implementation supports.
pub trait SupportedPointFormats {
    fn supported() -> ECPointFormatList;
}
impl SupportedPointFormats for ECPointFormatList {
    fn supported() -> ECPointFormatList {
        // Only uncompressed points are offered.
        vec![ECPointFormat::Uncompressed]
    }
}
declare_u16_vec!(NamedGroups, NamedGroup);
declare_u16_vec!(SupportedSignatureSchemes, SignatureScheme);
/// Decompose a `SignatureScheme` into, or compose one from, its TLS 1.2-style
/// (signature algorithm, hash algorithm) pair.
pub trait DecomposedSignatureScheme {
    fn sign(&self) -> SignatureAlgorithm;
    fn make(alg: SignatureAlgorithm, hash: HashAlgorithm) -> SignatureScheme;
}
impl DecomposedSignatureScheme for SignatureScheme {
    // Map each known scheme to its signature algorithm; anything else
    // (including EdDSA schemes) maps to Unknown(0).
    fn sign(&self) -> SignatureAlgorithm {
        match *self {
            SignatureScheme::RSA_PKCS1_SHA1 |
            SignatureScheme::RSA_PKCS1_SHA256 |
            SignatureScheme::RSA_PKCS1_SHA384 |
            SignatureScheme::RSA_PKCS1_SHA512 |
            SignatureScheme::RSA_PSS_SHA256 |
            SignatureScheme::RSA_PSS_SHA384 |
            SignatureScheme::RSA_PSS_SHA512 => SignatureAlgorithm::RSA,
            SignatureScheme::ECDSA_NISTP256_SHA256 |
            SignatureScheme::ECDSA_NISTP384_SHA384 |
            SignatureScheme::ECDSA_NISTP521_SHA512 => SignatureAlgorithm::ECDSA,
            _ => SignatureAlgorithm::Unknown(0),
        }
    }
    // Compose a scheme from an (alg, hash) pair. Panics (`unreachable!`) on
    // any combination outside the supported RSA/ECDSA x SHA table below.
    fn make(alg: SignatureAlgorithm, hash: HashAlgorithm) -> SignatureScheme {
        use crate::msgs::enums::SignatureAlgorithm::{RSA, ECDSA};
        use crate::msgs::enums::HashAlgorithm::{SHA1, SHA256, SHA384, SHA512};
        match (alg, hash) {
            (RSA, SHA1) => SignatureScheme::RSA_PKCS1_SHA1,
            (RSA, SHA256) => SignatureScheme::RSA_PKCS1_SHA256,
            (RSA, SHA384) => SignatureScheme::RSA_PKCS1_SHA384,
            (RSA, SHA512) => SignatureScheme::RSA_PKCS1_SHA512,
            (ECDSA, SHA256) => SignatureScheme::ECDSA_NISTP256_SHA256,
            (ECDSA, SHA384) => SignatureScheme::ECDSA_NISTP384_SHA384,
            (ECDSA, SHA512) => SignatureScheme::ECDSA_NISTP521_SHA512,
            (_, _) => unreachable!(),
        }
    }
}
/// Body of an SNI entry: a validated DNS hostname for the HostName type,
/// or an opaque payload for unknown name types.
#[derive(Clone, Debug)]
pub enum ServerNamePayload {
    HostName(webpki::DNSName),
    Unknown(Payload),
}
impl ServerNamePayload {
    // Wire format: u16 length followed by the ASCII hostname. Names that fail
    // webpki's DNS-name validation are rejected (None), with a warning.
    fn read_hostname(r: &mut Reader) -> Option<ServerNamePayload> {
        let len = u16::read(r)? as usize;
        let name = r.take(len)?;
        let dns_name = match webpki::DNSNameRef::try_from_ascii(
            untrusted::Input::from(name)) {
            Ok(dns_name) => dns_name,
            Err(_) => {
                warn!("Illegal SNI hostname received {:?}", name);
                return None;
            }
        };
        Some(ServerNamePayload::HostName(dns_name.into()))
    }
    fn encode_hostname(name: webpki::DNSNameRef, bytes: &mut Vec<u8>) {
        let dns_name_str: &str = name.into();
        (dns_name_str.len() as u16).encode(bytes);
        bytes.extend_from_slice(dns_name_str.as_bytes());
    }
    fn encode(&self, bytes: &mut Vec<u8>) {
        match *self {
            ServerNamePayload::HostName(ref r) => ServerNamePayload::encode_hostname(r.as_ref(), bytes),
            ServerNamePayload::Unknown(ref r) => r.encode(bytes),
        }
    }
}
/// A single entry in the SNI extension: name type plus its payload.
#[derive(Clone, Debug)]
pub struct ServerName {
    pub typ: ServerNameType,
    pub payload: ServerNamePayload,
}
impl Codec for ServerName {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.typ.encode(bytes);
        self.payload.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<ServerName> {
        let typ = ServerNameType::read(r)?;
        // Only HostName payloads are parsed; other types stay opaque.
        let payload = match typ {
            ServerNameType::HostName => ServerNamePayload::read_hostname(r)?,
            _ => ServerNamePayload::Unknown(Payload::read(r)?),
        };
        Some(ServerName {
            typ,
            payload,
        })
    }
}
declare_u16_vec!(ServerNameRequest, ServerName);
/// Helper for pulling the SNI hostname out of a `ServerNameRequest`.
pub trait ConvertServerNameList {
    fn get_hostname(&self) -> Option<webpki::DNSNameRef>;
}

impl ConvertServerNameList for ServerNameRequest {
    /// Return the first `HostName` entry's DNS name, if any entry has one.
    fn get_hostname(&self) -> Option<webpki::DNSNameRef> {
        self.iter().find_map(|entry| match entry.payload {
            ServerNamePayload::HostName(ref dns_name) => Some(dns_name.as_ref()),
            _ => None,
        })
    }
}
pub type ProtocolNameList = VecU16OfPayloadU8;

/// Conversions between ALPN protocol lists and plain byte vectors/slices.
pub trait ConvertProtocolNameList {
    fn from_slices(names: &[&[u8]]) -> Self;
    fn to_vecs(&self) -> Vec<Vec<u8>>;
    fn as_single_slice(&self) -> Option<&[u8]>;
}

impl ConvertProtocolNameList for ProtocolNameList {
    /// Wrap each protocol name in a length-prefixed `PayloadU8`.
    fn from_slices(names: &[&[u8]]) -> ProtocolNameList {
        names
            .iter()
            .map(|name| PayloadU8::new(name.to_vec()))
            .collect()
    }

    /// Copy every protocol name out into an owned byte vector.
    fn to_vecs(&self) -> Vec<Vec<u8>> {
        self.iter().map(|proto| proto.0.clone()).collect()
    }

    /// Borrow the protocol bytes iff exactly one name is present.
    fn as_single_slice(&self) -> Option<&[u8]> {
        match self.as_slice() {
            [single] => Some(&single.0),
            _ => None,
        }
    }
}
// --- TLS 1.3 Key shares ---
/// A TLS 1.3 KeyShareEntry: a named group and its opaque key-exchange data.
#[derive(Clone, Debug)]
pub struct KeyShareEntry {
    pub group: NamedGroup,
    pub payload: PayloadU16,
}
impl KeyShareEntry {
    pub fn new(group: NamedGroup, payload: &[u8]) -> KeyShareEntry {
        KeyShareEntry {
            group,
            payload: PayloadU16::new(payload.to_vec()),
        }
    }
}
impl Codec for KeyShareEntry {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.group.encode(bytes);
        self.payload.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<KeyShareEntry> {
        let group = NamedGroup::read(r)?;
        let payload = PayloadU16::read(r)?;
        Some(KeyShareEntry {
            group,
            payload,
        })
    }
}
// --- TLS 1.3 PresharedKey offers ---
/// A TLS 1.3 PskIdentity: the PSK identity bytes plus the obfuscated ticket age.
#[derive(Clone, Debug)]
pub struct PresharedKeyIdentity {
    pub identity: PayloadU16,
    pub obfuscated_ticket_age: u32,
}
impl PresharedKeyIdentity {
    pub fn new(id: Vec<u8>, age: u32) -> PresharedKeyIdentity {
        PresharedKeyIdentity {
            identity: PayloadU16::new(id),
            obfuscated_ticket_age: age,
        }
    }
}
impl Codec for PresharedKeyIdentity {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.identity.encode(bytes);
        self.obfuscated_ticket_age.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<PresharedKeyIdentity> {
        Some(PresharedKeyIdentity {
            identity: PayloadU16::read(r)?,
            obfuscated_ticket_age: u32::read(r)?,
        })
    }
}
declare_u16_vec!(PresharedKeyIdentities, PresharedKeyIdentity);
pub type PresharedKeyBinder = PayloadU8;
pub type PresharedKeyBinders = VecU16OfPayloadU8;
/// The pre_shared_key extension body: parallel lists of identities and binders.
#[derive(Clone, Debug)]
pub struct PresharedKeyOffer {
    pub identities: PresharedKeyIdentities,
    pub binders: PresharedKeyBinders,
}
impl PresharedKeyOffer {
    /// Make a new one with one entry.
    pub fn new(id: PresharedKeyIdentity, binder: Vec<u8>) -> PresharedKeyOffer {
        PresharedKeyOffer {
            identities: vec![ id ],
            binders: vec![ PresharedKeyBinder::new(binder) ],
        }
    }
}
impl Codec for PresharedKeyOffer {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.identities.encode(bytes);
        self.binders.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<PresharedKeyOffer> {
        Some(PresharedKeyOffer {
            identities: PresharedKeyIdentities::read(r)?,
            binders: PresharedKeyBinders::read(r)?,
        })
    }
}
// --- RFC6066 certificate status request ---
type ResponderIDs = VecU16OfPayloadU16;
/// RFC 6066 OCSPStatusRequest body.
#[derive(Clone, Debug)]
pub struct OCSPCertificateStatusRequest {
    pub responder_ids: ResponderIDs,
    pub extensions: PayloadU16,
}
impl Codec for OCSPCertificateStatusRequest {
    // Note: `encode` writes the OCSP status-type byte first, but `read` does
    // NOT consume it — the type byte is read by `CertificateStatusRequest::read`
    // before dispatching here.
    fn encode(&self, bytes: &mut Vec<u8>) {
        CertificateStatusType::OCSP.encode(bytes);
        self.responder_ids.encode(bytes);
        self.extensions.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<OCSPCertificateStatusRequest> {
        Some(OCSPCertificateStatusRequest {
            responder_ids: ResponderIDs::read(r)?,
            extensions: PayloadU16::read(r)?,
        })
    }
}
/// RFC 6066 CertificateStatusRequest: OCSP is parsed; other status types are
/// carried opaquely with their type code.
#[derive(Clone, Debug)]
pub enum CertificateStatusRequest {
    OCSP(OCSPCertificateStatusRequest),
    Unknown((CertificateStatusType, Payload))
}
impl Codec for CertificateStatusRequest {
    fn encode(&self, bytes: &mut Vec<u8>) {
        match *self {
            // The OCSP variant writes its own status-type byte in its encode.
            CertificateStatusRequest::OCSP(ref r) => r.encode(bytes),
            CertificateStatusRequest::Unknown((typ, ref payload)) => {
                typ.encode(bytes);
                payload.encode(bytes);
            }
        }
    }
    fn read(r: &mut Reader) -> Option<CertificateStatusRequest> {
        // The status-type byte is consumed here, then the body is dispatched.
        let typ = CertificateStatusType::read(r)?;
        match typ {
            CertificateStatusType::OCSP => {
                let ocsp_req = OCSPCertificateStatusRequest::read(r)?;
                Some(CertificateStatusRequest::OCSP(ocsp_req))
            }
            _ => {
                let data = Payload::read(r)?;
                Some(CertificateStatusRequest::Unknown((typ, data)))
            }
        }
    }
}
impl CertificateStatusRequest {
    /// Build an OCSP request with no responder ids and no extensions.
    pub fn build_ocsp() -> CertificateStatusRequest {
        let ocsp = OCSPCertificateStatusRequest {
            responder_ids: ResponderIDs::new(),
            extensions: PayloadU16::empty(),
        };
        CertificateStatusRequest::OCSP(ocsp)
    }
}
// ---
// SCTs
pub type SCTList = VecU16OfPayloadU16;
// ---
declare_u8_vec!(PSKKeyExchangeModes, PSKKeyExchangeMode);
declare_u16_vec!(KeyShareEntries, KeyShareEntry);
declare_u8_vec!(ProtocolVersions, ProtocolVersion);
/// All ClientHello extensions this implementation can represent; anything
/// unrecognised is preserved via the `Unknown` variant.
#[derive(Clone, Debug)]
pub enum ClientExtension {
    ECPointFormats(ECPointFormatList),
    NamedGroups(NamedGroups),
    SignatureAlgorithms(SupportedSignatureSchemes),
    ServerName(ServerNameRequest),
    // Empty session_ticket extension (requesting a ticket)...
    SessionTicketRequest,
    // ...versus one offering an existing ticket.
    SessionTicketOffer(Payload),
    Protocols(ProtocolNameList),
    SupportedVersions(ProtocolVersions),
    KeyShare(KeyShareEntries),
    PresharedKeyModes(PSKKeyExchangeModes),
    PresharedKey(PresharedKeyOffer),
    Cookie(PayloadU16),
    ExtendedMasterSecretRequest,
    CertificateStatusRequest(CertificateStatusRequest),
    SignedCertificateTimestampRequest,
    TransportParameters(Vec<u8>),
    EarlyData,
    Unknown(UnknownExtension),
}
impl ClientExtension {
    /// The wire `ExtensionType` code corresponding to each variant.
    pub fn get_type(&self) -> ExtensionType {
        match *self {
            ClientExtension::ECPointFormats(_) => ExtensionType::ECPointFormats,
            ClientExtension::NamedGroups(_) => ExtensionType::EllipticCurves,
            ClientExtension::SignatureAlgorithms(_) => ExtensionType::SignatureAlgorithms,
            ClientExtension::ServerName(_) => ExtensionType::ServerName,
            // Both session-ticket variants share one extension type.
            ClientExtension::SessionTicketRequest |
            ClientExtension::SessionTicketOffer(_) => ExtensionType::SessionTicket,
            ClientExtension::Protocols(_) => ExtensionType::ALProtocolNegotiation,
            ClientExtension::SupportedVersions(_) => ExtensionType::SupportedVersions,
            ClientExtension::KeyShare(_) => ExtensionType::KeyShare,
            ClientExtension::PresharedKeyModes(_) => ExtensionType::PSKKeyExchangeModes,
            ClientExtension::PresharedKey(_) => ExtensionType::PreSharedKey,
            ClientExtension::Cookie(_) => ExtensionType::Cookie,
            ClientExtension::ExtendedMasterSecretRequest => ExtensionType::ExtendedMasterSecret,
            ClientExtension::CertificateStatusRequest(_) => ExtensionType::StatusRequest,
            ClientExtension::SignedCertificateTimestampRequest => ExtensionType::SCT,
            ClientExtension::TransportParameters(_) => ExtensionType::TransportParameters,
            ClientExtension::EarlyData => ExtensionType::EarlyData,
            ClientExtension::Unknown(ref r) => r.typ,
        }
    }
}
impl Codec for ClientExtension {
    // Wire format: u16 extension type, u16 body length, then the body.
    // The body is built in a scratch vector so its length can be prefixed.
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.get_type().encode(bytes);
        let mut sub: Vec<u8> = Vec::new();
        match *self {
            ClientExtension::ECPointFormats(ref r) => r.encode(&mut sub),
            ClientExtension::NamedGroups(ref r) => r.encode(&mut sub),
            ClientExtension::SignatureAlgorithms(ref r) => r.encode(&mut sub),
            ClientExtension::ServerName(ref r) => r.encode(&mut sub),
            // These four are flag extensions with zero-length bodies.
            ClientExtension::SessionTicketRequest |
            ClientExtension::ExtendedMasterSecretRequest |
            ClientExtension::SignedCertificateTimestampRequest |
            ClientExtension::EarlyData => (),
            ClientExtension::SessionTicketOffer(ref r) => r.encode(&mut sub),
            ClientExtension::Protocols(ref r) => r.encode(&mut sub),
            ClientExtension::SupportedVersions(ref r) => r.encode(&mut sub),
            ClientExtension::KeyShare(ref r) => r.encode(&mut sub),
            ClientExtension::PresharedKeyModes(ref r) => r.encode(&mut sub),
            ClientExtension::PresharedKey(ref r) => r.encode(&mut sub),
            ClientExtension::Cookie(ref r) => r.encode(&mut sub),
            ClientExtension::CertificateStatusRequest(ref r) => r.encode(&mut sub),
            // Transport parameters are written raw, with no inner framing.
            ClientExtension::TransportParameters(ref r) => sub.extend_from_slice(r),
            ClientExtension::Unknown(ref r) => r.encode(&mut sub),
        }
        (sub.len() as u16).encode(bytes);
        bytes.append(&mut sub);
    }
    fn read(r: &mut Reader) -> Option<ClientExtension> {
        let typ = ExtensionType::read(r)?;
        let len = u16::read(r)? as usize;
        // Limit parsing to the declared body length.
        let mut sub = r.sub(len)?;
        Some(match typ {
            ExtensionType::ECPointFormats => {
                ClientExtension::ECPointFormats(ECPointFormatList::read(&mut sub)?)
            }
            ExtensionType::EllipticCurves => {
                ClientExtension::NamedGroups(NamedGroups::read(&mut sub)?)
            }
            ExtensionType::SignatureAlgorithms => {
                let schemes = SupportedSignatureSchemes::read(&mut sub)?;
                ClientExtension::SignatureAlgorithms(schemes)
            }
            ExtensionType::ServerName => {
                ClientExtension::ServerName(ServerNameRequest::read(&mut sub)?)
            }
            ExtensionType::SessionTicket => {
                // Non-empty body means an existing ticket is being offered.
                if sub.any_left() {
                    ClientExtension::SessionTicketOffer(Payload::read(&mut sub)?)
                } else {
                    ClientExtension::SessionTicketRequest
                }
            }
            ExtensionType::ALProtocolNegotiation => {
                ClientExtension::Protocols(ProtocolNameList::read(&mut sub)?)
            }
            ExtensionType::SupportedVersions => {
                ClientExtension::SupportedVersions(ProtocolVersions::read(&mut sub)?)
            }
            ExtensionType::KeyShare => {
                ClientExtension::KeyShare(KeyShareEntries::read(&mut sub)?)
            }
            ExtensionType::PSKKeyExchangeModes => {
                ClientExtension::PresharedKeyModes(PSKKeyExchangeModes::read(&mut sub)?)
            }
            ExtensionType::PreSharedKey => {
                ClientExtension::PresharedKey(PresharedKeyOffer::read(&mut sub)?)
            }
            ExtensionType::Cookie => ClientExtension::Cookie(PayloadU16::read(&mut sub)?),
            // Flag extensions must have empty bodies; otherwise they fall
            // through to Unknown below.
            ExtensionType::ExtendedMasterSecret if !sub.any_left() => {
                ClientExtension::ExtendedMasterSecretRequest
            }
            ExtensionType::StatusRequest => {
                let csr = CertificateStatusRequest::read(&mut sub)?;
                ClientExtension::CertificateStatusRequest(csr)
            }
            ExtensionType::SCT if !sub.any_left() => {
                ClientExtension::SignedCertificateTimestampRequest
            }
            ExtensionType::TransportParameters => {
                ClientExtension::TransportParameters(sub.rest().to_vec())
            }
            ExtensionType::EarlyData if !sub.any_left() => {
                ClientExtension::EarlyData
            }
            _ => ClientExtension::Unknown(UnknownExtension::read(typ, &mut sub)?),
        })
    }
}
impl ClientExtension {
    /// Make a basic SNI ServerNameRequest quoting `hostname`.
    pub fn make_sni(dns_name: webpki::DNSNameRef) -> ClientExtension {
        ClientExtension::ServerName(vec![
            ServerName {
                typ: ServerNameType::HostName,
                payload: ServerNamePayload::HostName(dns_name.into()),
            },
        ])
    }
}
#[derive(Clone, Debug)]
/// An extension a server may send in ServerHello/EncryptedExtensions.
pub enum ServerExtension {
    ECPointFormats(ECPointFormatList),
    ServerNameAck,
    SessionTicketAck,
    RenegotiationInfo(PayloadU8),
    Protocols(ProtocolNameList),
    KeyShare(KeyShareEntry),
    // Index of the accepted PSK identity.
    PresharedKey(u16),
    ExtendedMasterSecretAck,
    CertificateStatusAck,
    SignedCertificateTimestamp(SCTList),
    SupportedVersions(ProtocolVersion),
    // QUIC transport parameters, carried as opaque bytes.
    TransportParameters(Vec<u8>),
    EarlyData,
    // Any extension type we don't understand, kept verbatim.
    Unknown(UnknownExtension),
}
impl ServerExtension {
    /// The on-the-wire ExtensionType corresponding to this variant.
    pub fn get_type(&self) -> ExtensionType {
        match *self {
            ServerExtension::ECPointFormats(_) => ExtensionType::ECPointFormats,
            ServerExtension::ServerNameAck => ExtensionType::ServerName,
            ServerExtension::SessionTicketAck => ExtensionType::SessionTicket,
            ServerExtension::RenegotiationInfo(_) => ExtensionType::RenegotiationInfo,
            ServerExtension::Protocols(_) => ExtensionType::ALProtocolNegotiation,
            ServerExtension::KeyShare(_) => ExtensionType::KeyShare,
            ServerExtension::PresharedKey(_) => ExtensionType::PreSharedKey,
            ServerExtension::ExtendedMasterSecretAck => ExtensionType::ExtendedMasterSecret,
            ServerExtension::CertificateStatusAck => ExtensionType::StatusRequest,
            ServerExtension::SignedCertificateTimestamp(_) => ExtensionType::SCT,
            ServerExtension::SupportedVersions(_) => ExtensionType::SupportedVersions,
            ServerExtension::TransportParameters(_) => ExtensionType::TransportParameters,
            ServerExtension::EarlyData => ExtensionType::EarlyData,
            ServerExtension::Unknown(ref r) => r.typ,
        }
    }
}
impl Codec for ServerExtension {
    // Wire format: extension type (u16), body length (u16), body bytes.
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.get_type().encode(bytes);
        // Body goes into a scratch buffer so its length can be prefixed.
        let mut sub: Vec<u8> = Vec::new();
        match *self {
            ServerExtension::ECPointFormats(ref r) => r.encode(&mut sub),
            // Ack variants encode with an empty body.
            ServerExtension::ServerNameAck |
            ServerExtension::SessionTicketAck |
            ServerExtension::ExtendedMasterSecretAck |
            ServerExtension::CertificateStatusAck |
            ServerExtension::EarlyData => (),
            ServerExtension::RenegotiationInfo(ref r) => r.encode(&mut sub),
            ServerExtension::Protocols(ref r) => r.encode(&mut sub),
            ServerExtension::KeyShare(ref r) => r.encode(&mut sub),
            ServerExtension::PresharedKey(r) => r.encode(&mut sub),
            ServerExtension::SignedCertificateTimestamp(ref r) => r.encode(&mut sub),
            ServerExtension::SupportedVersions(ref r) => r.encode(&mut sub),
            ServerExtension::TransportParameters(ref r) => sub.extend_from_slice(r),
            ServerExtension::Unknown(ref r) => r.encode(&mut sub),
        }
        (sub.len() as u16).encode(bytes);
        bytes.append(&mut sub);
    }
    fn read(r: &mut Reader) -> Option<ServerExtension> {
        let typ = ExtensionType::read(r)?;
        let len = u16::read(r)? as usize;
        // Bound all body parsing to the declared extension length.
        let mut sub = r.sub(len)?;
        Some(match typ {
            ExtensionType::ECPointFormats => {
                ServerExtension::ECPointFormats(ECPointFormatList::read(&mut sub)?)
            }
            ExtensionType::ServerName => ServerExtension::ServerNameAck,
            ExtensionType::SessionTicket => ServerExtension::SessionTicketAck,
            ExtensionType::StatusRequest => ServerExtension::CertificateStatusAck,
            ExtensionType::RenegotiationInfo => {
                ServerExtension::RenegotiationInfo(PayloadU8::read(&mut sub)?)
            }
            ExtensionType::ALProtocolNegotiation => {
                ServerExtension::Protocols(ProtocolNameList::read(&mut sub)?)
            }
            ExtensionType::KeyShare => {
                ServerExtension::KeyShare(KeyShareEntry::read(&mut sub)?)
            }
            ExtensionType::PreSharedKey => {
                ServerExtension::PresharedKey(u16::read(&mut sub)?)
            }
            ExtensionType::ExtendedMasterSecret => ServerExtension::ExtendedMasterSecretAck,
            ExtensionType::SCT => {
                let scts = SCTList::read(&mut sub)?;
                ServerExtension::SignedCertificateTimestamp(scts)
            }
            ExtensionType::SupportedVersions => {
                ServerExtension::SupportedVersions(ProtocolVersion::read(&mut sub)?)
            }
            ExtensionType::TransportParameters => {
                ServerExtension::TransportParameters(sub.rest().to_vec())
            }
            ExtensionType::EarlyData => ServerExtension::EarlyData,
            // Unrecognised extensions are preserved verbatim.
            _ => ServerExtension::Unknown(UnknownExtension::read(typ, &mut sub)?),
        })
    }
}
impl ServerExtension {
    /// Build an ALPN extension carrying the given protocol names.
    pub fn make_alpn(proto: &[&[u8]]) -> ServerExtension {
        let protocols = ProtocolNameList::from_slices(proto);
        ServerExtension::Protocols(protocols)
    }
    /// Build an empty renegotiation_info extension.
    pub fn make_empty_renegotiation_info() -> ServerExtension {
        ServerExtension::RenegotiationInfo(PayloadU8::new(Vec::new()))
    }
    /// Build an SCT extension from encoded bytes.
    /// Panics if `sctl` is not a well-formed SCT list.
    pub fn make_sct(sctl: Vec<u8>) -> ServerExtension {
        let scts = SCTList::read_bytes(&sctl).expect("invalid SCT list");
        ServerExtension::SignedCertificateTimestamp(scts)
    }
}
#[derive(Debug)]
/// The body of a ClientHello handshake message.
pub struct ClientHelloPayload {
    pub client_version: ProtocolVersion,
    pub random: Random,
    pub session_id: SessionID,
    pub cipher_suites: Vec<CipherSuite>,
    pub compression_methods: Vec<Compression>,
    pub extensions: Vec<ClientExtension>,
}
impl Codec for ClientHelloPayload {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.client_version.encode(bytes);
        self.random.encode(bytes);
        self.session_id.encode(bytes);
        codec::encode_vec_u16(bytes, &self.cipher_suites);
        codec::encode_vec_u8(bytes, &self.compression_methods);
        // The extensions block is omitted entirely (not encoded as a
        // zero-length list) when there are no extensions.
        if !self.extensions.is_empty() {
            codec::encode_vec_u16(bytes, &self.extensions);
        }
    }
    fn read(r: &mut Reader) -> Option<ClientHelloPayload> {
        let mut ret = ClientHelloPayload {
            client_version: ProtocolVersion::read(r)?,
            random: Random::read(r)?,
            session_id: SessionID::read(r)?,
            cipher_suites: codec::read_vec_u16::<CipherSuite>(r)?,
            compression_methods: codec::read_vec_u8::<Compression>(r)?,
            extensions: Vec::new(),
        };
        // Extensions are optional; only parse them if bytes remain.
        if r.any_left() {
            ret.extensions = codec::read_vec_u16::<ClientExtension>(r)?;
        }
        Some(ret)
    }
}
impl ClientHelloPayload {
    /// Returns true if there is more than one extension of a given
    /// type.
    pub fn has_duplicate_extension(&self) -> bool {
        let mut seen = collections::HashSet::new();
        // HashSet::insert returns false when the value was already present.
        self.extensions
            .iter()
            .any(|ext| !seen.insert(ext.get_type().get_u16()))
    }
    /// Find the first extension of type `ext`, if any.
    pub fn find_extension(&self, ext: ExtensionType) -> Option<&ClientExtension> {
        self.extensions.iter().find(|x| x.get_type() == ext)
    }
    /// The SNI extension payload, if offered.
    pub fn get_sni_extension(&self) -> Option<&ServerNameRequest> {
        let ext = self.find_extension(ExtensionType::ServerName)?;
        match *ext {
            ClientExtension::ServerName(ref req) => Some(req),
            _ => None,
        }
    }
    /// The signature_algorithms extension, if offered.
    pub fn get_sigalgs_extension(&self) -> Option<&SupportedSignatureSchemes> {
        let ext = self.find_extension(ExtensionType::SignatureAlgorithms)?;
        match *ext {
            ClientExtension::SignatureAlgorithms(ref req) => Some(req),
            _ => None,
        }
    }
    /// The supported_groups (elliptic_curves) extension, if offered.
    pub fn get_namedgroups_extension(&self) -> Option<&NamedGroups> {
        let ext = self.find_extension(ExtensionType::EllipticCurves)?;
        match *ext {
            ClientExtension::NamedGroups(ref req) => Some(req),
            _ => None,
        }
    }
    /// The ec_point_formats extension, if offered.
    pub fn get_ecpoints_extension(&self) -> Option<&ECPointFormatList> {
        let ext = self.find_extension(ExtensionType::ECPointFormats)?;
        match *ext {
            ClientExtension::ECPointFormats(ref req) => Some(req),
            _ => None,
        }
    }
    /// The ALPN extension, if offered.
    pub fn get_alpn_extension(&self) -> Option<&ProtocolNameList> {
        let ext = self.find_extension(ExtensionType::ALProtocolNegotiation)?;
        match *ext {
            ClientExtension::Protocols(ref req) => Some(req),
            _ => None,
        }
    }
    /// A copy of the QUIC transport parameters, if offered.
    pub fn get_quic_params_extension(&self) -> Option<Vec<u8>> {
        let ext = self.find_extension(ExtensionType::TransportParameters)?;
        match *ext {
            ClientExtension::TransportParameters(ref bytes) => Some(bytes.to_vec()),
            _ => None,
        }
    }
    /// The session_ticket extension (request or offer), if present.
    pub fn get_ticket_extension(&self) -> Option<&ClientExtension> {
        self.find_extension(ExtensionType::SessionTicket)
    }
    /// The supported_versions extension, if offered.
    pub fn get_versions_extension(&self) -> Option<&ProtocolVersions> {
        let ext = self.find_extension(ExtensionType::SupportedVersions)?;
        match *ext {
            ClientExtension::SupportedVersions(ref vers) => Some(vers),
            _ => None,
        }
    }
    /// The key_share extension, if offered.
    pub fn get_keyshare_extension(&self) -> Option<&KeyShareEntries> {
        let ext = self.find_extension(ExtensionType::KeyShare)?;
        match *ext {
            ClientExtension::KeyShare(ref shares) => Some(shares),
            _ => None,
        }
    }
    /// True if the key_share extension contains two shares for the
    /// same group (an illegal ClientHello).
    pub fn has_keyshare_extension_with_duplicates(&self) -> bool {
        let entries = match self.get_keyshare_extension() {
            Some(entries) => entries,
            None => return false,
        };
        let mut seen = collections::HashSet::new();
        entries
            .iter()
            .any(|kse| !seen.insert(kse.group.get_u16()))
    }
    /// The pre_shared_key offer, if present.
    pub fn get_psk(&self) -> Option<&PresharedKeyOffer> {
        let ext = self.find_extension(ExtensionType::PreSharedKey)?;
        match *ext {
            ClientExtension::PresharedKey(ref psk) => Some(psk),
            _ => None,
        }
    }
    /// RFC 8446 requires pre_shared_key to be the last extension;
    /// returns false for an empty extension list.
    pub fn check_psk_ext_is_last(&self) -> bool {
        self.extensions
            .last()
            .map_or(false, |ext| ext.get_type() == ExtensionType::PreSharedKey)
    }
    /// The psk_key_exchange_modes extension, if offered.
    pub fn get_psk_modes(&self) -> Option<&PSKKeyExchangeModes> {
        let ext = self.find_extension(ExtensionType::PSKKeyExchangeModes)?;
        match *ext {
            ClientExtension::PresharedKeyModes(ref psk_modes) => Some(psk_modes),
            _ => None,
        }
    }
    /// Whether the client offered the given PSK key exchange mode.
    /// False when the extension is absent.
    pub fn psk_mode_offered(&self, mode: PSKKeyExchangeMode) -> bool {
        self.get_psk_modes()
            .map_or(false, |modes| modes.contains(&mode))
    }
    /// Overwrite the first binder of the trailing pre_shared_key
    /// extension with `binder`.
    ///
    /// Panics if there are no extensions; does nothing if the last
    /// extension is not a pre_shared_key offer.
    pub fn set_psk_binder(&mut self, binder: Vec<u8>) {
        let last_extension = self.extensions.last_mut().unwrap();
        if let ClientExtension::PresharedKey(ref mut offer) = *last_extension {
            offer.binders[0] = PresharedKeyBinder::new(binder);
        }
    }
    /// Whether extended_master_secret was offered.
    pub fn ems_support_offered(&self) -> bool {
        self.find_extension(ExtensionType::ExtendedMasterSecret)
            .is_some()
    }
    /// Whether early_data was offered.
    pub fn early_data_extension_offered(&self) -> bool {
        self.find_extension(ExtensionType::EarlyData).is_some()
    }
}
#[derive(Debug)]
/// An extension carried in a HelloRetryRequest (TLS1.3).
pub enum HelloRetryExtension {
    KeyShare(NamedGroup),
    Cookie(PayloadU16),
    SupportedVersions(ProtocolVersion),
    Unknown(UnknownExtension),
}
impl HelloRetryExtension {
    /// The on-the-wire ExtensionType corresponding to this variant.
    pub fn get_type(&self) -> ExtensionType {
        match *self {
            HelloRetryExtension::KeyShare(_) => ExtensionType::KeyShare,
            HelloRetryExtension::Cookie(_) => ExtensionType::Cookie,
            HelloRetryExtension::SupportedVersions(_) => ExtensionType::SupportedVersions,
            HelloRetryExtension::Unknown(ref r) => r.typ,
        }
    }
}
impl Codec for HelloRetryExtension {
    // Wire format: extension type (u16), body length (u16), body bytes.
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.get_type().encode(bytes);
        let mut sub: Vec<u8> = Vec::new();
        match *self {
            HelloRetryExtension::KeyShare(ref r) => r.encode(&mut sub),
            HelloRetryExtension::Cookie(ref r) => r.encode(&mut sub),
            HelloRetryExtension::SupportedVersions(ref r) => r.encode(&mut sub),
            HelloRetryExtension::Unknown(ref r) => r.encode(&mut sub),
        }
        (sub.len() as u16).encode(bytes);
        bytes.append(&mut sub);
    }
    fn read(r: &mut Reader) -> Option<HelloRetryExtension> {
        let typ = ExtensionType::read(r)?;
        let len = u16::read(r)? as usize;
        let mut sub = r.sub(len)?;
        Some(match typ {
            ExtensionType::KeyShare => {
                HelloRetryExtension::KeyShare(NamedGroup::read(&mut sub)?)
            }
            ExtensionType::Cookie => {
                HelloRetryExtension::Cookie(PayloadU16::read(&mut sub)?)
            }
            ExtensionType::SupportedVersions => {
                HelloRetryExtension::SupportedVersions(ProtocolVersion::read(&mut sub)?)
            }
            // Unrecognised extensions are preserved verbatim.
            _ => HelloRetryExtension::Unknown(UnknownExtension::read(typ, &mut sub)?),
        })
    }
}
#[derive(Debug)]
/// A TLS1.3 HelloRetryRequest message.
pub struct HelloRetryRequest {
    pub legacy_version: ProtocolVersion,
    pub session_id: SessionID,
    pub cipher_suite: CipherSuite,
    pub extensions: Vec<HelloRetryExtension>,
}
impl Codec for HelloRetryRequest {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.legacy_version.encode(bytes);
        // A HelloRetryRequest is a ServerHello with this magic random.
        HELLO_RETRY_REQUEST_RANDOM.encode(bytes);
        self.session_id.encode(bytes);
        self.cipher_suite.encode(bytes);
        // legacy_compression_method must be null.
        Compression::Null.encode(bytes);
        codec::encode_vec_u16(bytes, &self.extensions);
    }
    // NOTE(review): read() starts at session_id — the version and random
    // appear to be consumed by the caller before dispatching here; confirm.
    fn read(r: &mut Reader) -> Option<HelloRetryRequest> {
        let session_id = SessionID::read(r)?;
        let cipher_suite = CipherSuite::read(r)?;
        let compression = Compression::read(r)?;
        // Reject any compression method other than null.
        if compression != Compression::Null {
            return None;
        }
        Some(HelloRetryRequest {
            // Placeholder; the caller already consumed the real version.
            legacy_version: ProtocolVersion::Unknown(0),
            session_id,
            cipher_suite,
            extensions: codec::read_vec_u16::<HelloRetryExtension>(r)?,
        })
    }
}
impl HelloRetryRequest {
    /// Returns true if there is more than one extension of a given
    /// type.
    pub fn has_duplicate_extension(&self) -> bool {
        let mut seen = collections::HashSet::new();
        // HashSet::insert returns false when the value was already present.
        self.extensions
            .iter()
            .any(|ext| !seen.insert(ext.get_type().get_u16()))
    }
    /// True if any extension type other than key_share,
    /// supported_versions or cookie is present.
    pub fn has_unknown_extension(&self) -> bool {
        self.extensions
            .iter()
            .any(|ext| match ext.get_type() {
                ExtensionType::KeyShare |
                ExtensionType::SupportedVersions |
                ExtensionType::Cookie => false,
                _ => true,
            })
    }
    /// Find the first extension of type `ext`, if any.
    fn find_extension(&self, ext: ExtensionType) -> Option<&HelloRetryExtension> {
        self.extensions.iter().find(|x| x.get_type() == ext)
    }
    /// The group the server asked us to retry with, if present.
    pub fn get_requested_key_share_group(&self) -> Option<NamedGroup> {
        let ext = self.find_extension(ExtensionType::KeyShare)?;
        match *ext {
            HelloRetryExtension::KeyShare(grp) => Some(grp),
            _ => None,
        }
    }
    /// The cookie to echo back in the retried ClientHello, if present.
    pub fn get_cookie(&self) -> Option<&PayloadU16> {
        let ext = self.find_extension(ExtensionType::Cookie)?;
        match *ext {
            HelloRetryExtension::Cookie(ref ck) => Some(ck),
            _ => None,
        }
    }
    /// The selected protocol version, if present.
    pub fn get_supported_versions(&self) -> Option<ProtocolVersion> {
        let ext = self.find_extension(ExtensionType::SupportedVersions)?;
        match *ext {
            HelloRetryExtension::SupportedVersions(ver) => Some(ver),
            _ => None,
        }
    }
}
#[derive(Debug)]
/// The body of a ServerHello handshake message.
pub struct ServerHelloPayload {
    pub legacy_version: ProtocolVersion,
    pub random: Random,
    pub session_id: SessionID,
    pub cipher_suite: CipherSuite,
    pub compression_method: Compression,
    pub extensions: Vec<ServerExtension>,
}
impl Codec for ServerHelloPayload {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.legacy_version.encode(bytes);
        self.random.encode(bytes);
        self.session_id.encode(bytes);
        self.cipher_suite.encode(bytes);
        self.compression_method.encode(bytes);
        // The extensions block is omitted entirely when empty.
        if !self.extensions.is_empty() {
            codec::encode_vec_u16(bytes, &self.extensions);
        }
    }
    // minus version and random, which have already been read.
    fn read(r: &mut Reader) -> Option<ServerHelloPayload> {
        let session_id = SessionID::read(r)?;
        let suite = CipherSuite::read(r)?;
        let compression = Compression::read(r)?;
        let mut ret = ServerHelloPayload {
            // Placeholders; the caller fills in the real values.
            legacy_version: ProtocolVersion::Unknown(0),
            random: ZERO_RANDOM.clone(),
            session_id,
            cipher_suite: suite,
            compression_method: compression,
            extensions: Vec::new(),
        };
        if r.any_left() {
            ret.extensions = codec::read_vec_u16::<ServerExtension>(r)?;
        }
        Some(ret)
    }
}
impl HasServerExtensions for ServerHelloPayload {
    fn get_extensions(&self) -> &[ServerExtension] {
        &self.extensions
    }
}
impl ServerHelloPayload {
    /// The server's key_share entry, if present (TLS1.3).
    pub fn get_key_share(&self) -> Option<&KeyShareEntry> {
        if let Some(&ServerExtension::KeyShare(ref share)) =
            self.find_extension(ExtensionType::KeyShare) {
            Some(share)
        } else {
            None
        }
    }
    /// The index of the PSK identity the server accepted, if present.
    pub fn get_psk_index(&self) -> Option<u16> {
        if let Some(&ServerExtension::PresharedKey(index)) =
            self.find_extension(ExtensionType::PreSharedKey) {
            Some(index)
        } else {
            None
        }
    }
    /// The server's ec_point_formats list, if present.
    pub fn get_ecpoints_extension(&self) -> Option<&ECPointFormatList> {
        if let Some(&ServerExtension::ECPointFormats(ref fmts)) =
            self.find_extension(ExtensionType::ECPointFormats) {
            Some(fmts)
        } else {
            None
        }
    }
    /// Whether the server acknowledged extended_master_secret.
    pub fn ems_support_acked(&self) -> bool {
        self.find_extension(ExtensionType::ExtendedMasterSecret)
            .is_some()
    }
    /// The server's SCT list, if present.
    pub fn get_sct_list(&self) -> Option<&SCTList> {
        if let Some(&ServerExtension::SignedCertificateTimestamp(ref sctl)) =
            self.find_extension(ExtensionType::SCT) {
            Some(sctl)
        } else {
            None
        }
    }
    /// The selected version from supported_versions, if present.
    pub fn get_supported_versions(&self) -> Option<ProtocolVersion> {
        if let Some(&ServerExtension::SupportedVersions(vers)) =
            self.find_extension(ExtensionType::SupportedVersions) {
            Some(vers)
        } else {
            None
        }
    }
}
/// A TLS1.2-style certificate chain: just a list of DER certificates.
pub type CertificatePayload = Vec<key::Certificate>;
impl Codec for CertificatePayload {
    fn encode(&self, bytes: &mut Vec<u8>) {
        codec::encode_vec_u24(bytes, self);
    }
    fn read(r: &mut Reader) -> Option<CertificatePayload> {
        // 64KB of certificates is plenty, 16MB is obviously silly
        codec::read_vec_u24_limited(r, 0x10000)
    }
}
// TLS1.3 changes the Certificate payload encoding.
// That's annoying. It means the parsing is not
// context-free any more.
#[derive(Debug)]
/// An extension attached to a single certificate entry (TLS1.3).
pub enum CertificateExtension {
    CertificateStatus(CertificateStatus),
    SignedCertificateTimestamp(SCTList),
    Unknown(UnknownExtension),
}
impl CertificateExtension {
    /// The on-the-wire ExtensionType corresponding to this variant.
    pub fn get_type(&self) -> ExtensionType {
        match *self {
            CertificateExtension::CertificateStatus(_) => ExtensionType::StatusRequest,
            CertificateExtension::SignedCertificateTimestamp(_) => ExtensionType::SCT,
            CertificateExtension::Unknown(ref r) => r.typ,
        }
    }
    /// Build an SCT extension from encoded bytes.
    /// Panics if `sct_list` is not a well-formed SCT list.
    pub fn make_sct(sct_list: Vec<u8>) -> CertificateExtension {
        let sctl = SCTList::read_bytes(&sct_list)
            .expect("invalid SCT list");
        CertificateExtension::SignedCertificateTimestamp(sctl)
    }
    /// The raw OCSP response bytes, if this is a status extension.
    pub fn get_cert_status(&self) -> Option<&Vec<u8>> {
        match *self {
            CertificateExtension::CertificateStatus(ref cs) => Some(&cs.ocsp_response.0),
            _ => None
        }
    }
    /// The SCT list, if this is an SCT extension.
    pub fn get_sct_list(&self) -> Option<&SCTList> {
        match *self {
            CertificateExtension::SignedCertificateTimestamp(ref sctl) => Some(sctl),
            _ => None
        }
    }
}
impl Codec for CertificateExtension {
    // Wire format: extension type (u16), body length (u16), body bytes.
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.get_type().encode(bytes);
        let mut sub: Vec<u8> = Vec::new();
        match *self {
            CertificateExtension::CertificateStatus(ref r) => r.encode(&mut sub),
            CertificateExtension::SignedCertificateTimestamp(ref r) => r.encode(&mut sub),
            CertificateExtension::Unknown(ref r) => r.encode(&mut sub),
        }
        (sub.len() as u16).encode(bytes);
        bytes.append(&mut sub);
    }
    fn read(r: &mut Reader) -> Option<CertificateExtension> {
        let typ = ExtensionType::read(r)?;
        let len = u16::read(r)? as usize;
        let mut sub = r.sub(len)?;
        Some(match typ {
            ExtensionType::StatusRequest => {
                let st = CertificateStatus::read(&mut sub)?;
                CertificateExtension::CertificateStatus(st)
            }
            ExtensionType::SCT => {
                let scts = SCTList::read(&mut sub)?;
                CertificateExtension::SignedCertificateTimestamp(scts)
            }
            // Unrecognised extensions are preserved verbatim.
            _ => CertificateExtension::Unknown(UnknownExtension::read(typ, &mut sub)?),
        })
    }
}
// A 16-bit length-prefixed vector of CertificateExtension.
declare_u16_vec!(CertificateExtensions, CertificateExtension);
#[derive(Debug)]
/// One certificate plus its per-certificate extensions (TLS1.3).
pub struct CertificateEntry {
    pub cert: key::Certificate,
    pub exts: CertificateExtensions,
}
impl Codec for CertificateEntry {
    /// Encode the certificate followed by its extensions.
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.cert.encode(bytes);
        self.exts.encode(bytes);
    }
    /// Read a certificate and its extension list, in that order.
    fn read(r: &mut Reader) -> Option<CertificateEntry> {
        let cert = key::Certificate::read(r)?;
        let exts = CertificateExtensions::read(r)?;
        Some(CertificateEntry { cert, exts })
    }
}
impl CertificateEntry {
    /// An entry carrying `cert` and no extensions.
    pub fn new(cert: key::Certificate) -> CertificateEntry {
        CertificateEntry {
            cert,
            exts: Vec::new(),
        }
    }
    /// True if more than one extension of a given type is present.
    pub fn has_duplicate_extension(&self) -> bool {
        let mut seen = collections::HashSet::new();
        // HashSet::insert returns false when the value was already present.
        self.exts
            .iter()
            .any(|ext| !seen.insert(ext.get_type().get_u16()))
    }
    /// True if any extension other than status_request or SCT is present.
    pub fn has_unknown_extension(&self) -> bool {
        self.exts
            .iter()
            .any(|ext| match ext.get_type() {
                ExtensionType::StatusRequest |
                ExtensionType::SCT => false,
                _ => true,
            })
    }
    /// The raw OCSP response from the status_request extension, if any.
    pub fn get_ocsp_response(&self) -> Option<&Vec<u8>> {
        self.exts
            .iter()
            .find(|ext| ext.get_type() == ExtensionType::StatusRequest)
            .and_then(|ext| ext.get_cert_status())
    }
    /// The SCT list extension, if any.
    pub fn get_scts(&self) -> Option<&SCTList> {
        self.exts
            .iter()
            .find(|ext| ext.get_type() == ExtensionType::SCT)
            .and_then(|ext| ext.get_sct_list())
    }
}
#[derive(Debug)]
/// The TLS1.3 Certificate message body: a request context plus a list
/// of certificate entries.
pub struct CertificatePayloadTLS13 {
    pub context: PayloadU8,
    pub list: Vec<CertificateEntry>,
}
impl Codec for CertificatePayloadTLS13 {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.context.encode(bytes);
        codec::encode_vec_u24(bytes, &self.list);
    }
    fn read(r: &mut Reader) -> Option<CertificatePayloadTLS13> {
        Some(CertificatePayloadTLS13 {
            context: PayloadU8::read(r)?,
            // Same 64KB sanity limit as the TLS1.2 certificate payload.
            list: codec::read_vec_u24_limited::<CertificateEntry>(r, 0x10000)?,
        })
    }
}
impl CertificatePayloadTLS13 {
    /// An empty payload with a zero-length request context.
    pub fn new() -> CertificatePayloadTLS13 {
        CertificatePayloadTLS13 {
            context: PayloadU8::empty(),
            list: Vec::new(),
        }
    }
    /// True if any entry repeats an extension type.
    pub fn any_entry_has_duplicate_extension(&self) -> bool {
        self.list
            .iter()
            .any(|ent| ent.has_duplicate_extension())
    }
    /// True if any entry carries an extension type we don't recognise.
    pub fn any_entry_has_unknown_extension(&self) -> bool {
        self.list
            .iter()
            .any(|ent| ent.has_unknown_extension())
    }
    /// True if any entry carries extensions at all.
    pub fn any_entry_has_extension(&self) -> bool {
        self.list
            .iter()
            .any(|ent| !ent.exts.is_empty())
    }
    /// The end-entity certificate's OCSP response, or empty if absent.
    pub fn get_end_entity_ocsp(&self) -> Vec<u8> {
        self.list
            .first()
            .and_then(|ent| ent.get_ocsp_response())
            .cloned()
            .unwrap_or_default()
    }
    /// The end-entity certificate's SCT list, if present.
    pub fn get_end_entity_scts(&self) -> Option<SCTList> {
        self.list
            .first()
            .and_then(|ent| ent.get_scts())
            .cloned()
    }
    /// Strip extensions, yielding a plain TLS1.2-style chain.
    pub fn convert(&self) -> CertificatePayload {
        self.list
            .iter()
            .map(|entry| entry.cert.clone())
            .collect()
    }
}
#[derive(Debug)]
/// The key exchange family negotiated for a session.
pub enum KeyExchangeAlgorithm {
    BulkOnly,
    DH,
    DHE,
    RSA,
    ECDH,
    ECDHE,
}
// We don't support arbitrary curves.  It's a terrible
// idea and unnecessary attack surface.  Please,
// get a grip.
#[derive(Debug)]
pub struct ECParameters {
    pub curve_type: ECCurveType,
    pub named_group: NamedGroup,
}
impl Codec for ECParameters {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.curve_type.encode(bytes);
        self.named_group.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<ECParameters> {
        let ct = ECCurveType::read(r)?;
        // Only named curves are accepted (see comment above).
        if ct != ECCurveType::NamedCurve {
            return None;
        }
        let grp = NamedGroup::read(r)?;
        Some(ECParameters {
            curve_type: ct,
            named_group: grp,
        })
    }
}
#[derive(Debug, Clone)]
/// A signature together with the scheme that produced it.
pub struct DigitallySignedStruct {
    pub scheme: SignatureScheme,
    pub sig: PayloadU16,
}
impl DigitallySignedStruct {
    /// Wrap raw signature bytes with their scheme.
    pub fn new(scheme: SignatureScheme, sig: Vec<u8>) -> DigitallySignedStruct {
        DigitallySignedStruct {
            scheme,
            sig: PayloadU16::new(sig),
        }
    }
}
impl Codec for DigitallySignedStruct {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.scheme.encode(bytes);
        self.sig.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<DigitallySignedStruct> {
        let scheme = SignatureScheme::read(r)?;
        let sig = PayloadU16::read(r)?;
        Some(DigitallySignedStruct {
            scheme,
            sig,
        })
    }
}
#[derive(Debug)]
/// The client's ephemeral ECDH public key (ClientKeyExchange body).
pub struct ClientECDHParams {
    pub public: PayloadU8,
}
impl Codec for ClientECDHParams {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.public.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<ClientECDHParams> {
        let pb = PayloadU8::read(r)?;
        Some(ClientECDHParams { public: pb })
    }
}
#[derive(Debug)]
/// The server's curve choice and ephemeral ECDH public key.
pub struct ServerECDHParams {
    pub curve_params: ECParameters,
    pub public: PayloadU8,
}
impl ServerECDHParams {
    /// Build params for `named_group` carrying `pubkey`.
    pub fn new(named_group: &NamedGroup, pubkey: &[u8]) -> ServerECDHParams {
        ServerECDHParams {
            curve_params: ECParameters {
                curve_type: ECCurveType::NamedCurve,
                named_group: *named_group,
            },
            public: PayloadU8::new(pubkey.to_vec()),
        }
    }
}
impl Codec for ServerECDHParams {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.curve_params.encode(bytes);
        self.public.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<ServerECDHParams> {
        let cp = ECParameters::read(r)?;
        let pb = PayloadU8::read(r)?;
        Some(ServerECDHParams {
            curve_params: cp,
            public: pb,
        })
    }
}
#[derive(Debug)]
/// An ECDHE ServerKeyExchange body: params plus the signature over them.
pub struct ECDHEServerKeyExchange {
    pub params: ServerECDHParams,
    pub dss: DigitallySignedStruct,
}
impl Codec for ECDHEServerKeyExchange {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.params.encode(bytes);
        self.dss.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<ECDHEServerKeyExchange> {
        let params = ServerECDHParams::read(r)?;
        let dss = DigitallySignedStruct::read(r)?;
        Some(ECDHEServerKeyExchange {
            params,
            dss,
        })
    }
}
#[derive(Debug)]
/// A ServerKeyExchange body, possibly not yet fully parsed.
pub enum ServerKeyExchangePayload {
    ECDHE(ECDHEServerKeyExchange),
    /// Raw bytes; re-parsed via `unwrap_given_kxa` once the key
    /// exchange algorithm is known.
    Unknown(Payload),
}
impl Codec for ServerKeyExchangePayload {
    fn encode(&self, bytes: &mut Vec<u8>) {
        match *self {
            ServerKeyExchangePayload::ECDHE(ref x) => x.encode(bytes),
            ServerKeyExchangePayload::Unknown(ref x) => x.encode(bytes),
        }
    }
    fn read(r: &mut Reader) -> Option<ServerKeyExchangePayload> {
        // read as Unknown, fully parse when we know the
        // KeyExchangeAlgorithm
        Payload::read(r).map(ServerKeyExchangePayload::Unknown)
    }
}
impl ServerKeyExchangePayload {
    /// Re-parse an `Unknown` payload now that the key exchange
    /// algorithm is known.  Returns None for already-parsed payloads,
    /// unsupported algorithms, parse failures, or trailing bytes.
    pub fn unwrap_given_kxa(&self, kxa: &KeyExchangeAlgorithm) -> Option<ServerKeyExchangePayload> {
        if let ServerKeyExchangePayload::Unknown(ref unk) = *self {
            let mut rd = Reader::init(&unk.0);
            let result = match *kxa {
                KeyExchangeAlgorithm::ECDHE => {
                    ECDHEServerKeyExchange::read(&mut rd)
                        .map(ServerKeyExchangePayload::ECDHE)
                }
                _ => None,
            };
            // Reject payloads with trailing garbage.
            if !rd.any_left() {
                return result;
            };
        }
        None
    }
    /// Encode just the server params (the signed portion) into `bytes`,
    /// clearing it first.
    pub fn encode_params(&self, bytes: &mut Vec<u8>) {
        bytes.clear();
        if let ServerKeyExchangePayload::ECDHE(ref x) = *self {
            x.params.encode(bytes);
        }
    }
    /// The signature over the params, if this payload is parsed.
    pub fn get_sig(&self) -> Option<DigitallySignedStruct> {
        match *self {
            ServerKeyExchangePayload::ECDHE(ref x) => Some(x.dss.clone()),
            _ => None,
        }
    }
}
// -- EncryptedExtensions (TLS1.3 only) --
declare_u16_vec!(EncryptedExtensions, ServerExtension);
/// Common accessors for messages carrying a list of ServerExtensions.
pub trait HasServerExtensions {
    fn get_extensions(&self) -> &[ServerExtension];
    /// Returns true if there is more than one extension of a given
    /// type.
    fn has_duplicate_extension(&self) -> bool {
        let mut seen = collections::HashSet::new();
        // HashSet::insert returns false when the value was already present.
        self.get_extensions()
            .iter()
            .any(|ext| !seen.insert(ext.get_type().get_u16()))
    }
    /// Find the first extension of type `ext`, if any.
    fn find_extension(&self, ext: ExtensionType) -> Option<&ServerExtension> {
        self.get_extensions().iter().find(|x| x.get_type() == ext)
    }
    /// The negotiated ALPN protocol, if the server sent exactly one.
    fn get_alpn_protocol(&self) -> Option<&[u8]> {
        let ext = self.find_extension(ExtensionType::ALProtocolNegotiation)?;
        match *ext {
            ServerExtension::Protocols(ref protos) => protos.as_single_slice(),
            _ => None,
        }
    }
    /// A copy of the QUIC transport parameters, if present.
    fn get_quic_params_extension(&self) -> Option<Vec<u8>> {
        let ext = self.find_extension(ExtensionType::TransportParameters)?;
        match *ext {
            ServerExtension::TransportParameters(ref bytes) => Some(bytes.to_vec()),
            _ => None,
        }
    }
    /// Whether the early_data extension is present.
    fn early_data_extension_offered(&self) -> bool {
        self.find_extension(ExtensionType::EarlyData).is_some()
    }
}
impl HasServerExtensions for EncryptedExtensions {
    /// An EncryptedExtensions message is itself the extension list.
    fn get_extensions(&self) -> &[ServerExtension] {
        self.as_slice()
    }
}
// -- CertificateRequest and sundries --
declare_u8_vec!(ClientCertificateTypes, ClientCertificateType);
// A DER-encoded X.501 Name, length-prefixed.
pub type DistinguishedName = PayloadU16;
pub type DistinguishedNames = VecU16OfPayloadU16;
#[derive(Debug)]
/// A TLS1.2 CertificateRequest message body.
pub struct CertificateRequestPayload {
    pub certtypes: ClientCertificateTypes,
    pub sigschemes: SupportedSignatureSchemes,
    pub canames: DistinguishedNames,
}
impl Codec for CertificateRequestPayload {
    /// Encode certificate types, signature schemes, then CA names.
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.certtypes.encode(bytes);
        self.sigschemes.encode(bytes);
        self.canames.encode(bytes);
    }
    /// Read the three fields in wire order (struct-literal fields
    /// evaluate in source order).
    fn read(r: &mut Reader) -> Option<CertificateRequestPayload> {
        Some(CertificateRequestPayload {
            certtypes: ClientCertificateTypes::read(r)?,
            sigschemes: SupportedSignatureSchemes::read(r)?,
            canames: DistinguishedNames::read(r)?,
        })
    }
}
#[derive(Debug)]
/// An extension in a TLS1.3 CertificateRequest.
pub enum CertReqExtension {
    SignatureAlgorithms(SupportedSignatureSchemes),
    AuthorityNames(DistinguishedNames),
    Unknown(UnknownExtension),
}
impl CertReqExtension {
    /// The on-the-wire ExtensionType corresponding to this variant.
    pub fn get_type(&self) -> ExtensionType {
        match *self {
            CertReqExtension::SignatureAlgorithms(_) => ExtensionType::SignatureAlgorithms,
            CertReqExtension::AuthorityNames(_) => ExtensionType::CertificateAuthorities,
            CertReqExtension::Unknown(ref r) => r.typ,
        }
    }
}
impl Codec for CertReqExtension {
    // Wire format: extension type (u16), body length (u16), body bytes.
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.get_type().encode(bytes);
        let mut sub: Vec<u8> = Vec::new();
        match *self {
            CertReqExtension::SignatureAlgorithms(ref r) => r.encode(&mut sub),
            CertReqExtension::AuthorityNames(ref r) => r.encode(&mut sub),
            CertReqExtension::Unknown(ref r) => r.encode(&mut sub),
        }
        (sub.len() as u16).encode(bytes);
        bytes.append(&mut sub);
    }
    fn read(r: &mut Reader) -> Option<CertReqExtension> {
        let typ = ExtensionType::read(r)?;
        let len = u16::read(r)? as usize;
        let mut sub = r.sub(len)?;
        Some(match typ {
            ExtensionType::SignatureAlgorithms => {
                let schemes = SupportedSignatureSchemes::read(&mut sub)?;
                // An empty signature_algorithms list is illegal.
                if schemes.is_empty() {
                    return None;
                }
                CertReqExtension::SignatureAlgorithms(schemes)
            }
            ExtensionType::CertificateAuthorities => {
                let cas = DistinguishedNames::read(&mut sub)?;
                CertReqExtension::AuthorityNames(cas)
            }
            // Unrecognised extensions are preserved verbatim.
            _ => CertReqExtension::Unknown(UnknownExtension::read(typ, &mut sub)?),
        })
    }
}
// A 16-bit length-prefixed vector of CertReqExtension.
declare_u16_vec!(CertReqExtensions, CertReqExtension);
#[derive(Debug)]
/// A TLS1.3 CertificateRequest message body.
pub struct CertificateRequestPayloadTLS13 {
    pub context: PayloadU8,
    pub extensions: CertReqExtensions,
}
impl Codec for CertificateRequestPayloadTLS13 {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.context.encode(bytes);
        self.extensions.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<CertificateRequestPayloadTLS13> {
        let context = PayloadU8::read(r)?;
        let extensions = CertReqExtensions::read(r)?;
        Some(CertificateRequestPayloadTLS13 {
            context,
            extensions,
        })
    }
}
impl CertificateRequestPayloadTLS13 {
    /// Find the first extension of type `ext`, if any.
    pub fn find_extension(&self, ext: ExtensionType) -> Option<&CertReqExtension> {
        self.extensions.iter().find(|x| x.get_type() == ext)
    }
    /// The signature_algorithms extension, if present.
    pub fn get_sigalgs_extension(&self) -> Option<&SupportedSignatureSchemes> {
        if let Some(&CertReqExtension::SignatureAlgorithms(ref sa)) =
            self.find_extension(ExtensionType::SignatureAlgorithms) {
            Some(sa)
        } else {
            None
        }
    }
    /// The certificate_authorities extension, if present.
    pub fn get_authorities_extension(&self) -> Option<&DistinguishedNames> {
        if let Some(&CertReqExtension::AuthorityNames(ref an)) =
            self.find_extension(ExtensionType::CertificateAuthorities) {
            Some(an)
        } else {
            None
        }
    }
}
// -- NewSessionTicket --
#[derive(Debug)]
/// A TLS1.2 NewSessionTicket message body.
pub struct NewSessionTicketPayload {
    pub lifetime_hint: u32,
    pub ticket: PayloadU16,
}
impl NewSessionTicketPayload {
    /// Wrap raw ticket bytes with their lifetime hint (seconds).
    pub fn new(lifetime_hint: u32, ticket: Vec<u8>) -> NewSessionTicketPayload {
        NewSessionTicketPayload {
            lifetime_hint,
            ticket: PayloadU16::new(ticket),
        }
    }
}
impl Codec for NewSessionTicketPayload {
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.lifetime_hint.encode(bytes);
        self.ticket.encode(bytes);
    }
    fn read(r: &mut Reader) -> Option<NewSessionTicketPayload> {
        let lifetime = u32::read(r)?;
        let ticket = PayloadU16::read(r)?;
        Some(NewSessionTicketPayload {
            lifetime_hint: lifetime,
            ticket,
        })
    }
}
// -- NewSessionTicket electric boogaloo --
#[derive(Debug)]
/// An extension in a TLS1.3 NewSessionTicket message.
pub enum NewSessionTicketExtension {
    // max_early_data_size for this ticket.
    EarlyData(u32),
    Unknown(UnknownExtension),
}
impl NewSessionTicketExtension {
    /// The on-the-wire ExtensionType corresponding to this variant.
    pub fn get_type(&self) -> ExtensionType {
        match *self {
            NewSessionTicketExtension::EarlyData(_) => ExtensionType::EarlyData,
            NewSessionTicketExtension::Unknown(ref r) => r.typ,
        }
    }
}
impl Codec for NewSessionTicketExtension {
    // Wire format: extension type (u16), body length (u16), body bytes.
    fn encode(&self, bytes: &mut Vec<u8>) {
        self.get_type().encode(bytes);
        let mut sub: Vec<u8> = Vec::new();
        match *self {
            NewSessionTicketExtension::EarlyData(r) => r.encode(&mut sub),
            NewSessionTicketExtension::Unknown(ref r) => r.encode(&mut sub),
        }
        (sub.len() as u16).encode(bytes);
        bytes.append(&mut sub);
    }
    fn read(r: &mut Reader) -> Option<NewSessionTicketExtension> {
        let typ = ExtensionType::read(r)?;
        let len = u16::read(r)? as usize;
        let mut sub = r.sub(len)?;
        Some(match typ {
            ExtensionType::EarlyData => NewSessionTicketExtension::EarlyData(u32::read(&mut sub)?),
            // Unrecognised extensions are preserved verbatim.
            _ => {
                NewSessionTicketExtension::Unknown(UnknownExtension::read(typ, &mut sub)?)
            }
        })
    }
}
declare_u16_vec!(NewSessionTicketExtensions, NewSessionTicketExtension);
/// Body of a TLS1.3 NewSessionTicket handshake message.
#[derive(Debug)]
pub struct NewSessionTicketPayloadTLS13 {
    // Ticket lifetime (presumably seconds, per RFC 8446 — confirm).
    pub lifetime: u32,
    // Obfuscation offset added to the ticket age by the client.
    pub age_add: u32,
    // Per-ticket nonce, u8-length-prefixed.
    pub nonce: PayloadU8,
    // Opaque ticket bytes, u16-length-prefixed.
    pub ticket: PayloadU16,
    // Ticket extensions (e.g. early data).
    pub exts: NewSessionTicketExtensions,
}
impl NewSessionTicketPayloadTLS13 {
    /// Construct a ticket payload with no extensions.
    pub fn new(lifetime: u32,
               age_add: u32,
               nonce: Vec<u8>,
               ticket: Vec<u8>) -> NewSessionTicketPayloadTLS13 {
        let nonce = PayloadU8::new(nonce);
        let ticket = PayloadU16::new(ticket);
        NewSessionTicketPayloadTLS13 {
            lifetime,
            age_add,
            nonce,
            ticket,
            exts: vec![],
        }
    }

    /// Look up the extension with the given wire type, if present.
    pub fn find_extension(&self, ext: ExtensionType) -> Option<&NewSessionTicketExtension> {
        self.exts.iter().find(|candidate| candidate.get_type() == ext)
    }

    /// The `max_early_data_size` carried by the EarlyData extension,
    /// if the ticket has one.
    pub fn get_max_early_data_size(&self) -> Option<u32> {
        match *self.find_extension(ExtensionType::EarlyData)? {
            NewSessionTicketExtension::EarlyData(sz) => Some(sz),
            _ => None,
        }
    }
}
impl Codec for NewSessionTicketPayloadTLS13 {
    fn encode(&self, bytes: &mut Vec<u8>) {
        // Fields appear in wire order.
        self.lifetime.encode(bytes);
        self.age_add.encode(bytes);
        self.nonce.encode(bytes);
        self.ticket.encode(bytes);
        self.exts.encode(bytes);
    }

    fn read(r: &mut Reader) -> Option<NewSessionTicketPayloadTLS13> {
        // Struct-literal fields evaluate in source order, so the reads
        // happen in the same sequence as the original.
        Some(NewSessionTicketPayloadTLS13 {
            lifetime: u32::read(r)?,
            age_add: u32::read(r)?,
            nonce: PayloadU8::read(r)?,
            ticket: PayloadU16::read(r)?,
            exts: NewSessionTicketExtensions::read(r)?,
        })
    }
}
// -- RFC6066 certificate status types
/// Only supports OCSP
#[derive(Debug)]
pub struct CertificateStatus {
    // OCSP response bytes, u24-length-prefixed on the wire.
    pub ocsp_response: PayloadU24
}
impl Codec for CertificateStatus {
    fn encode(&self, bytes: &mut Vec<u8>) {
        // Only the OCSP status type is ever emitted.
        CertificateStatusType::OCSP.encode(bytes);
        self.ocsp_response.encode(bytes);
    }

    fn read(r: &mut Reader) -> Option<CertificateStatus> {
        match CertificateStatusType::read(r)? {
            CertificateStatusType::OCSP => {
                let ocsp_response = PayloadU24::read(r)?;
                Some(CertificateStatus { ocsp_response })
            }
            // Any other status type is rejected.
            _ => None,
        }
    }
}
impl CertificateStatus {
    /// Build a `CertificateStatus` from raw OCSP response bytes.
    pub fn new(ocsp: Vec<u8>) -> CertificateStatus {
        CertificateStatus {
            ocsp_response: PayloadU24::new(ocsp),
        }
    }

    /// Move the OCSP response out of `self`, leaving an empty payload behind.
    pub fn take_ocsp_response(&mut self) -> Vec<u8> {
        mem::replace(&mut self.ocsp_response, PayloadU24::new(Vec::new())).0
    }
}
/// The body of a handshake message, one variant per message type.
/// TLS1.3-specific shapes get their own `*TLS13` variants; bodies we do
/// not parse are kept as opaque `Payload`s.
#[derive(Debug)]
pub enum HandshakePayload {
    HelloRequest,
    ClientHello(ClientHelloPayload),
    ServerHello(ServerHelloPayload),
    HelloRetryRequest(HelloRetryRequest),
    Certificate(CertificatePayload),
    CertificateTLS13(CertificatePayloadTLS13),
    ServerKeyExchange(ServerKeyExchangePayload),
    CertificateRequest(CertificateRequestPayload),
    CertificateRequestTLS13(CertificateRequestPayloadTLS13),
    CertificateVerify(DigitallySignedStruct),
    ServerHelloDone,
    EarlyData,
    EndOfEarlyData,
    ClientKeyExchange(Payload),
    NewSessionTicket(NewSessionTicketPayload),
    NewSessionTicketTLS13(NewSessionTicketPayloadTLS13),
    EncryptedExtensions(EncryptedExtensions),
    KeyUpdate(KeyUpdateRequest),
    Finished(Payload),
    CertificateStatus(CertificateStatus),
    MessageHash(Payload),
    Unknown(Payload),
}
impl HandshakePayload {
    // Encode just the message body (no type/length header).
    fn encode(&self, bytes: &mut Vec<u8>) {
        match *self {
            // These message types have empty bodies: nothing to write.
            HandshakePayload::HelloRequest |
            HandshakePayload::ServerHelloDone |
            HandshakePayload::EarlyData |
            HandshakePayload::EndOfEarlyData => {}
            HandshakePayload::ClientHello(ref x) => x.encode(bytes),
            HandshakePayload::ServerHello(ref x) => x.encode(bytes),
            HandshakePayload::HelloRetryRequest(ref x) => x.encode(bytes),
            HandshakePayload::Certificate(ref x) => x.encode(bytes),
            HandshakePayload::CertificateTLS13(ref x) => x.encode(bytes),
            HandshakePayload::ServerKeyExchange(ref x) => x.encode(bytes),
            HandshakePayload::ClientKeyExchange(ref x) => x.encode(bytes),
            HandshakePayload::CertificateRequest(ref x) => x.encode(bytes),
            HandshakePayload::CertificateRequestTLS13(ref x) => x.encode(bytes),
            HandshakePayload::CertificateVerify(ref x) => x.encode(bytes),
            HandshakePayload::NewSessionTicket(ref x) => x.encode(bytes),
            HandshakePayload::NewSessionTicketTLS13(ref x) => x.encode(bytes),
            HandshakePayload::EncryptedExtensions(ref x) => x.encode(bytes),
            HandshakePayload::KeyUpdate(ref x) => x.encode(bytes),
            HandshakePayload::Finished(ref x) => x.encode(bytes),
            HandshakePayload::CertificateStatus(ref x) => x.encode(bytes),
            HandshakePayload::MessageHash(ref x) => x.encode(bytes),
            HandshakePayload::Unknown(ref x) => x.encode(bytes),
        }
    }
}
/// A complete handshake message: its type code plus parsed body.
#[derive(Debug)]
pub struct HandshakeMessagePayload {
    // Wire type; may be HelloRetryRequest even though that encodes as
    // ServerHello on the wire (see `Codec::encode` below).
    pub typ: HandshakeType,
    pub payload: HandshakePayload,
}
impl Codec for HandshakeMessagePayload {
    fn encode(&self, bytes: &mut Vec<u8>) {
        // Encode the body first so its length is known.
        let mut body = Vec::new();
        self.payload.encode(&mut body);

        // A HelloRetryRequest goes on the wire as a ServerHello; every
        // other type encodes as itself.
        let wire_type = match self.typ {
            HandshakeType::HelloRetryRequest => HandshakeType::ServerHello,
            other => other,
        };
        wire_type.encode(bytes);

        // Type is followed by a u24 length and the body itself.
        codec::u24(body.len() as u32).encode(bytes);
        bytes.append(&mut body);
    }

    fn read(r: &mut Reader) -> Option<HandshakeMessagePayload> {
        // Without a version hint, parse as TLS1.2.
        HandshakeMessagePayload::read_version(r, ProtocolVersion::TLSv1_2)
    }
}
impl HandshakeMessagePayload {
    // Length of the fully encoded message (header plus body).
    pub fn length(&self) -> usize {
        let mut buf = Vec::new();
        self.encode(&mut buf);
        buf.len()
    }
    // Parse one handshake message, interpreting version-dependent bodies
    // (Certificate, CertificateRequest, NewSessionTicket) according to
    // `vers`. Returns None on any trailing or malformed bytes.
    pub fn read_version(r: &mut Reader, vers: ProtocolVersion) -> Option<HandshakeMessagePayload> {
        let mut typ = HandshakeType::read(r)?;
        let len = codec::u24::read(r)?.0 as usize;
        let mut sub = r.sub(len)?;
        let payload = match typ {
            HandshakeType::HelloRequest if sub.left() == 0 => HandshakePayload::HelloRequest,
            HandshakeType::ClientHello => {
                HandshakePayload::ClientHello(ClientHelloPayload::read(&mut sub)?)
            }
            HandshakeType::ServerHello => {
                // A HelloRetryRequest arrives disguised as a ServerHello;
                // it is recognised by its magic `random` value, and `typ`
                // is rewritten accordingly.
                let version = ProtocolVersion::read(&mut sub)?;
                let random = Random::read(&mut sub)?;
                if random == HELLO_RETRY_REQUEST_RANDOM {
                    let mut hrr = HelloRetryRequest::read(&mut sub)?;
                    hrr.legacy_version = version;
                    typ = HandshakeType::HelloRetryRequest;
                    HandshakePayload::HelloRetryRequest(hrr)
                } else {
                    let mut shp = ServerHelloPayload::read(&mut sub)?;
                    shp.legacy_version = version;
                    shp.random = random;
                    HandshakePayload::ServerHello(shp)
                }
            }
            HandshakeType::Certificate if vers == ProtocolVersion::TLSv1_3 => {
                let p = CertificatePayloadTLS13::read(&mut sub)?;
                HandshakePayload::CertificateTLS13(p)
            }
            HandshakeType::Certificate => {
                HandshakePayload::Certificate(CertificatePayload::read(&mut sub)?)
            }
            HandshakeType::ServerKeyExchange => {
                let p = ServerKeyExchangePayload::read(&mut sub)?;
                HandshakePayload::ServerKeyExchange(p)
            }
            HandshakeType::ServerHelloDone => {
                // Must have an empty body.
                if sub.any_left() {
                    return None;
                }
                HandshakePayload::ServerHelloDone
            }
            HandshakeType::ClientKeyExchange => {
                HandshakePayload::ClientKeyExchange(Payload::read(&mut sub)?)
            }
            HandshakeType::CertificateRequest if vers == ProtocolVersion::TLSv1_3 => {
                let p = CertificateRequestPayloadTLS13::read(&mut sub)?;
                HandshakePayload::CertificateRequestTLS13(p)
            }
            HandshakeType::CertificateRequest => {
                let p = CertificateRequestPayload::read(&mut sub)?;
                HandshakePayload::CertificateRequest(p)
            }
            HandshakeType::CertificateVerify => {
                HandshakePayload::CertificateVerify(DigitallySignedStruct::read(&mut sub)?)
            }
            HandshakeType::NewSessionTicket if vers == ProtocolVersion::TLSv1_3 => {
                let p = NewSessionTicketPayloadTLS13::read(&mut sub)?;
                HandshakePayload::NewSessionTicketTLS13(p)
            }
            HandshakeType::NewSessionTicket => {
                let p = NewSessionTicketPayload::read(&mut sub)?;
                HandshakePayload::NewSessionTicket(p)
            }
            HandshakeType::EncryptedExtensions => {
                HandshakePayload::EncryptedExtensions(EncryptedExtensions::read(&mut sub)?)
            }
            HandshakeType::KeyUpdate => {
                HandshakePayload::KeyUpdate(KeyUpdateRequest::read(&mut sub)?)
            }
            HandshakeType::Finished => {
                HandshakePayload::Finished(Payload::read(&mut sub)?)
            }
            HandshakeType::CertificateStatus => {
                HandshakePayload::CertificateStatus(CertificateStatus::read(&mut sub)?)
            }
            HandshakeType::MessageHash => {
                // does not appear on the wire
                return None;
            }
            HandshakeType::HelloRetryRequest => {
                // not legal on wire
                return None;
            }
            _ => HandshakePayload::Unknown(Payload::read(&mut sub)?),
        };
        // Reject messages whose declared length exceeds their body.
        if sub.any_left() {
            None
        } else {
            Some(HandshakeMessagePayload {
                typ,
                payload,
            })
        }
    }
    // A KeyUpdate message that does not request an update in return.
    pub fn build_key_update_notify() -> HandshakeMessagePayload {
        HandshakeMessagePayload {
            typ: HandshakeType::KeyUpdate,
            payload: HandshakePayload::KeyUpdate(KeyUpdateRequest::UpdateNotRequested),
        }
    }
    // Encoding of this message with the PSK binders truncated off, as
    // required for computing the binder itself.
    // NOTE(review): panics (via unwrap) if a ClientHello has no PSK
    // offer — callers must only use this when a PSK extension is present.
    pub fn get_encoding_for_binder_signing(&self) -> Vec<u8> {
        let mut ret = self.get_encoding();
        let binder_len = match self.payload {
            HandshakePayload::ClientHello(ref ch) => {
                let offer = ch.get_psk().unwrap();
                let mut binders_encoding = Vec::new();
                offer.binders.encode(&mut binders_encoding);
                binders_encoding.len()
            }
            _ => 0,
        };
        let ret_len = ret.len() - binder_len;
        ret.truncate(ret_len);
        ret
    }
    // A synthetic `message_hash` message (used for HelloRetryRequest
    // transcript restarts); never read from the wire.
    pub fn build_handshake_hash(hash: &[u8]) -> HandshakeMessagePayload {
        HandshakeMessagePayload {
            typ: HandshakeType::MessageHash,
            payload: HandshakePayload::MessageHash(Payload::new(hash.to_vec()))
        }
    }
}
| 31.021351 | 104 | 0.597255 |
d6ecee2a736820b6c5df3348391effade52910ce | 17,177 | //!
//! Extended uniform poly meshes. This module defines an extension of uniform polygon meshes like
//! TriMesh and QuadMesh that are accompanied by their own dual topologies.
//!
use super::{QuadMesh, TriMesh};
use crate::attrib::*;
use crate::mesh::topology::*;
use crate::mesh::vertex_positions::VertexPositions;
use crate::prim::Triangle;
use crate::Real;
use std::slice::{Iter, IterMut};
/*
 * Commonly used meshes and their implementations.
 */
// Implement indexing for the Vec types used in meshes.
// The macro `impl_index_for` is defined in the `index` module.
// Allows `Vec<usize>` to be indexed directly by these typed index wrappers.
impl_index_for!(Vec<usize> by FaceEdgeIndex, FaceVertexIndex, VertexCellIndex, VertexFaceIndex);
// Generates the full API for an "extended" uniform surface mesh: the base
// mesh type plus a vertex->face (dual) topology stored as a flattened
// jagged array (`face_indices` indexed via `face_offsets`).
macro_rules! impl_uniform_surface_mesh {
    ($mesh_type:ident, $base_type:ident, $verts_per_face:expr) => {
        impl<T: Real> $mesh_type<T> {
            // Build the extended mesh and its dual topology from raw buffers.
            pub fn new(verts: Vec<[T; 3]>, indices: Vec<[usize; $verts_per_face]>) -> $mesh_type<T> {
                let (face_indices, face_offsets) =
                    Self::compute_dual_topology(verts.len(), &indices);
                $mesh_type {
                    base_mesh: $base_type::new(verts, indices),
                    face_indices,
                    face_offsets,
                }
            }
            /// Compute the `face_indices` and `face_offsets` fields of this uniform mesh.
            pub(crate) fn compute_dual_topology(
                num_verts: usize,
                indices: &[[usize; $verts_per_face]],
            ) -> (Vec<usize>, Vec<usize>) {
                // Bucket incident face indices per vertex.
                let mut face_indices = Vec::new();
                face_indices.resize(num_verts, Vec::new());
                for (fidx, face) in indices.iter().enumerate() {
                    for &vidx in face {
                        face_indices[vidx].push(fidx);
                    }
                }
                // NOTE(review): this vector ends up holding `num_verts + 1`
                // entries, so `indices.len()` is only a rough capacity hint.
                let mut face_offsets = Vec::with_capacity(indices.len());
                face_offsets.push(0);
                for neighbours in face_indices.iter() {
                    let last = *face_offsets.last().unwrap();
                    face_offsets.push(last + neighbours.len());
                }
                // Flatten the per-vertex buckets into one contiguous array.
                (
                    face_indices
                        .iter()
                        .flat_map(|x| x.iter().cloned())
                        .collect(),
                    face_offsets,
                )
            }
            /// Iterate over each face.
            pub fn face_iter(&self) -> Iter<[usize; $verts_per_face]> {
                self.base_mesh.face_iter()
            }
            /// Iterate mutably over each face.
            pub fn face_iter_mut(&mut self) -> IterMut<[usize; $verts_per_face]> {
                self.base_mesh.face_iter_mut()
            }
            /// Face accessor. These are vertex indices.
            #[inline]
            pub fn face(&self, fidx: FaceIndex) -> &[usize; $verts_per_face] {
                self.base_mesh.face(fidx)
            }
            /// Return a slice of individual faces.
            #[inline]
            pub fn faces(&self) -> &[[usize; $verts_per_face]] {
                self.base_mesh.faces()
            }
            /// Reverse the order of each polygon in this mesh.
            #[inline]
            pub fn reverse(&mut self) {
                self.base_mesh.reverse()
            }
            /// Reverse the order of each polygon in this mesh. This is the consuming version of the
            /// `reverse` method.
            #[inline]
            pub fn reversed(mut self) -> Self {
                self.reverse();
                self
            }
            /// Sort vertices by the given key values.
            pub fn sort_vertices_by_key<K, F>(&mut self, f: F)
            where
                F: FnMut(usize) -> K,
                K: Ord,
            {
                // Ensure we have at least one vertex.
                if self.num_vertices() == 0 {
                    return;
                }
                let $mesh_type {
                    ref mut base_mesh,
                    ref mut face_indices,
                    ref mut face_offsets,
                    .. // face and face_{vertex,edge} attributes are unchanged
                } = *self;
                // `order` maps new vertex slots to original vertex indices
                // (assumed from its use below — confirm against the base
                // mesh's `sort_vertices_by_key`).
                let order = base_mesh.sort_vertices_by_key(f);
                // Can't easily do this in place, so just for simplicity's sake we use extra memory
                // for transferring dual topology.
                let orig_face_indices = face_indices.clone();
                let orig_face_offsets = face_offsets.clone();
                // Note: The first and last offsets don't change.
                let mut prev_off = 0;
                for (idx, off) in face_offsets.iter_mut().skip(1).enumerate() {
                    let orig_idx = order[idx];
                    let prev_orig_off = orig_face_offsets[orig_idx];
                    let orig_off = orig_face_offsets[orig_idx + 1];
                    // Copy this vertex's face list into its new slot.
                    for (idx, &orig_idx) in face_indices[prev_off..]
                        .iter_mut()
                        .zip(orig_face_indices[prev_orig_off..orig_off].iter())
                    {
                        *idx = orig_idx;
                    }
                    *off = prev_off + orig_off - prev_orig_off;
                    prev_off = *off;
                }
            }
        }
        impl<T: Real> NumVertices for $mesh_type<T> {
            fn num_vertices(&self) -> usize {
                self.base_mesh.num_vertices()
            }
        }
        impl<T: Real> NumFaces for $mesh_type<T> {
            fn num_faces(&self) -> usize {
                self.base_mesh.num_faces()
            }
        }
        // Face->vertex topology is delegated to the base mesh.
        impl<T: Real> FaceVertex for $mesh_type<T> {
            #[inline]
            fn vertex<FVI>(&self, fv_idx: FVI) -> VertexIndex
            where
                FVI: Copy + Into<FaceVertexIndex>,
            {
                self.base_mesh.vertex(fv_idx)
            }
            #[inline]
            fn face_vertex<FI>(&self, fidx: FI, which: usize) -> Option<FaceVertexIndex>
            where
                FI: Copy + Into<FaceIndex>,
            {
                self.base_mesh.face_vertex(fidx.into(), which)
            }
            #[inline]
            fn num_face_vertices(&self) -> usize {
                self.base_mesh.num_face_vertices()
            }
            #[inline]
            fn num_vertices_at_face<FI>(&self, fidx: FI) -> usize
            where
                FI: Copy + Into<FaceIndex>,
            {
                self.base_mesh.num_vertices_at_face(fidx.into())
            }
        }
        // Face->edge topology is delegated to the base mesh.
        impl<T: Real> FaceEdge for $mesh_type<T> {
            #[inline]
            fn edge<FEI>(&self, fe_idx: FEI) -> EdgeIndex
            where
                FEI: Copy + Into<FaceEdgeIndex>,
            {
                self.base_mesh.edge(fe_idx)
            }
            #[inline]
            fn face_edge<FI>(&self, fidx: FI, which: usize) -> Option<FaceEdgeIndex>
            where
                FI: Copy + Into<FaceIndex>,
            {
                self.base_mesh.face_edge(fidx.into(), which)
            }
            #[inline]
            fn num_face_edges(&self) -> usize {
                self.base_mesh.num_face_edges()
            }
            #[inline]
            fn num_edges_at_face<FI>(&self, fidx: FI) -> usize
            where
                FI: Copy + Into<FaceIndex>,
            {
                self.base_mesh.num_edges_at_face(fidx.into())
            }
        }
        // Vertex->face (dual) topology is answered from the flattened
        // `face_indices`/`face_offsets` arrays stored on the extended mesh.
        impl<T: Real> VertexFace for $mesh_type<T> {
            #[inline]
            fn face<VFI>(&self, vf_idx: VFI) -> FaceIndex
            where
                VFI: Copy + Into<VertexFaceIndex>,
            {
                let vf_idx = usize::from(vf_idx.into());
                debug_assert!(vf_idx < self.num_vertex_faces());
                self.face_indices[vf_idx].into()
            }
            #[inline]
            fn vertex_face<VI>(&self, vidx: VI, which: usize) -> Option<VertexFaceIndex>
            where
                VI: Copy + Into<VertexIndex>,
            {
                if which >= self.num_faces_at_vertex(vidx) {
                    return None;
                }
                let vidx = usize::from(vidx.into());
                debug_assert!(vidx < self.num_vertices());
                Some((self.face_offsets[vidx] + which).into())
            }
            #[inline]
            fn num_vertex_faces(&self) -> usize {
                self.face_indices.len()
            }
            #[inline]
            fn num_faces_at_vertex<VI>(&self, vidx: VI) -> usize
            where
                VI: Copy + Into<VertexIndex>,
            {
                let vidx = usize::from(vidx.into());
                self.face_offsets[vidx + 1] - self.face_offsets[vidx]
            }
        }
        impl<T: Real> Default for $mesh_type<T> {
            /// Produce an empty mesh. This is not particularly useful on its own, however it can be
            /// used as a null case for various mesh algorithms.
            fn default() -> Self {
                $mesh_type::new(vec![], vec![])
            }
        }
    };
}
#[derive(Clone, Debug, PartialEq, Attrib, Intrinsic)]
pub struct TriMeshExt<T: Real> {
    /// The underlying uniform triangle mesh, which carries the vertex
    /// positions, face indices and all mesh attributes.
    #[attributes(Vertex, Face, FaceVertex, FaceEdge)]
    #[intrinsics(VertexPositions::vertex_positions)]
    pub base_mesh: TriMesh<T>,
    /// Lists of face indices for each vertex. Since each vertex can have a variable number of face
    /// neighbours, the `face_offsets` field keeps track of where each subarray of indices begins.
    pub face_indices: Vec<usize>,
    /// Offsets into the `face_indices` array, one for each vertex. The last offset is always
    /// equal to the size of `face_indices` for convenience.
    pub face_offsets: Vec<usize>,
}
#[derive(Clone, Debug, PartialEq, Attrib, Intrinsic)]
pub struct QuadMeshExt<T: Real> {
    /// The underlying uniform quad mesh, which carries the vertex
    /// positions, face indices and all mesh attributes.
    #[attributes(Vertex, Face, FaceVertex, FaceEdge)]
    #[intrinsics(VertexPositions::vertex_positions)]
    pub base_mesh: QuadMesh<T>,
    /// Lists of face indices for each vertex. Since each vertex can have a variable number of face
    /// neighbours, the `face_offsets` field keeps track of where each subarray of indices begins.
    pub face_indices: Vec<usize>,
    /// Offsets into the `face_indices` array, one for each vertex. The last offset is always
    /// equal to the size of `face_indices` for convenience.
    pub face_offsets: Vec<usize>,
}
// Instantiate the shared uniform-surface-mesh API for both extended types.
impl_uniform_surface_mesh!(TriMeshExt, TriMesh, 3);
impl_uniform_surface_mesh!(QuadMeshExt, QuadMesh, 4);
impl<T: Real> TriMeshExt<T> {
    /// Triangle iterator.
    ///
    /// ```
    /// use meshx::mesh::TriMeshExt;
    /// use meshx::prim::Triangle;
    ///
    /// let verts = vec![[0.0, 0.0, 0.0], [0.0, 0.0, 1.0], [0.0, 1.0, 0.0]];
    /// let mesh = TriMeshExt::new(verts.clone(), vec![[0, 1, 2]]);
    /// let tri = Triangle::from_indexed_slice(&[0, 1, 2], verts.as_slice());
    /// assert_eq!(Some(tri), mesh.tri_iter().next());
    /// ```
    #[inline]
    pub fn tri_iter(&self) -> impl Iterator<Item = Triangle<T>> + '_ {
        self.base_mesh.tri_iter()
    }
    /// Get a triangle primitive corresponding to the given vertex indices.
    #[inline]
    pub fn tri_from_indices(&self, indices: &[usize; 3]) -> Triangle<T> {
        self.base_mesh.tri_from_indices(indices)
    }
}
/// Convert a polygon mesh to an extended triangle mesh, triangulating
/// polygonal faces along the way.
// TODO: Improve this algorithm with ear clipping:
// https://www.geometrictools.com/Documentation/TriangulationByEarClipping.pdf
impl<T: Real> From<super::PolyMesh<T>> for TriMeshExt<T> {
    fn from(mesh: super::PolyMesh<T>) -> TriMeshExt<T> {
        // Triangulation itself is handled by the TriMesh conversion; here we
        // only rebuild the dual (vertex->face) topology for the result.
        let base_mesh = TriMesh::from(mesh);
        let (face_indices, face_offsets) = TriMeshExt::<T>::compute_dual_topology(
            base_mesh.vertex_positions.len(),
            base_mesh.indices.as_slice(),
        );
        TriMeshExt {
            base_mesh,
            face_indices,
            face_offsets,
        }
    }
}
// Generates lossless conversions between an extended mesh and its base
// mesh: base -> extended rebuilds the dual topology; extended -> base
// simply drops it.
macro_rules! impl_mesh_convert {
    ($ext_mesh:ident <-> $base_mesh:ident) => {
        impl<T: Real> From<$base_mesh<T>> for $ext_mesh<T> {
            fn from(base_mesh: $base_mesh<T>) -> $ext_mesh<T> {
                // Reinterpret the index buffer as fixed-size face arrays for
                // `compute_dual_topology` (safe cast via bytemuck).
                let flat_indices = bytemuck::cast_slice(base_mesh.indices.as_slice());
                let (face_indices, face_offsets) =
                    Self::compute_dual_topology(base_mesh.vertex_positions.len(), flat_indices);
                $ext_mesh {
                    base_mesh,
                    face_indices,
                    face_offsets,
                }
            }
        }
        impl<T: Real> From<$ext_mesh<T>> for $base_mesh<T> {
            fn from(ext: $ext_mesh<T>) -> $base_mesh<T> {
                ext.base_mesh
            }
        }
    };
}
// Conversions between each extended mesh type and its base mesh.
impl_mesh_convert!(TriMeshExt <-> TriMesh);
impl_mesh_convert!(QuadMeshExt <-> QuadMesh);
#[cfg(test)]
mod tests {
    use super::*;
    use crate::index::Index;
    // Sorting vertices must permute positions, attributes and the dual
    // topology consistently, and be reversible.
    #[test]
    fn mesh_sort() {
        // Sort -> check for inequality -> sort to original -> check for equality.
        let pts = vec![
            [0.0, 0.0, 0.0],
            [1.0, 0.0, 0.0],
            [0.0, 1.0, 0.0],
            [1.0, 1.0, 0.0],
            [1.0, 1.0, 1.0],
        ];
        let indices = vec![[0, 1, 2], [1, 3, 2], [0, 2, 4]];
        let mut trimesh = TriMeshExt::new(pts, indices);
        let orig_trimesh = trimesh.clone();
        let values = [3, 2, 1, 4, 0];
        trimesh.sort_vertices_by_key(|k| values[k]);
        assert_ne!(trimesh, orig_trimesh);
        let rev_values = [4, 2, 1, 0, 3];
        trimesh.sort_vertices_by_key(|k| rev_values[k]);
        assert_eq!(trimesh, orig_trimesh);
        // Verify exact values.
        trimesh
            .insert_attrib_data::<usize, VertexIndex>("i", vec![0, 1, 2, 3, 4])
            .unwrap();
        trimesh.sort_vertices_by_key(|k| values[k]);
        assert_eq!(
            trimesh.vertex_positions(),
            &[
                [1.0, 1.0, 1.0],
                [0.0, 1.0, 0.0],
                [1.0, 0.0, 0.0],
                [0.0, 0.0, 0.0],
                [1.0, 1.0, 0.0],
            ]
        );
        // `rev_values` actually already corresponds to 0..=4 being sorted by `values`.
        assert_eq!(
            trimesh.attrib_as_slice::<usize, VertexIndex>("i").unwrap(),
            &rev_values[..]
        );
        assert_eq!(
            trimesh.base_mesh.indices.as_slice(),
            &[[3, 2, 1], [2, 4, 1], [3, 1, 0]]
        );
    }
    // Basic primal and dual topology counts/queries on a two-triangle strip.
    #[test]
    fn two_triangles_test() {
        let pts = vec![
            [0.0, 0.0, 0.0],
            [1.0, 0.0, 0.0],
            [0.0, 1.0, 0.0],
            [1.0, 1.0, 0.0],
        ];
        let indices = vec![[0, 1, 2], [1, 3, 2]];
        let trimesh = TriMeshExt::new(pts, indices);
        assert_eq!(trimesh.num_vertices(), 4);
        assert_eq!(trimesh.num_faces(), 2);
        assert_eq!(trimesh.num_face_vertices(), 6);
        assert_eq!(trimesh.num_face_edges(), 6);
        assert_eq!(Index::from(trimesh.face_to_vertex(1, 1)), 3);
        assert_eq!(Index::from(trimesh.face_to_vertex(0, 2)), 2);
        assert_eq!(Index::from(trimesh.face_edge(1, 0)), 3);
        let mut face_iter = trimesh.face_iter();
        assert_eq!(face_iter.next(), Some(&[0usize, 1, 2]));
        assert_eq!(face_iter.next(), Some(&[1usize, 3, 2]));
        // Verify dual topology
        let vertex_faces = vec![vec![0], vec![0, 1], vec![0, 1], vec![1]];
        for i in 0..vertex_faces.len() {
            assert_eq!(trimesh.num_faces_at_vertex(i), vertex_faces[i].len());
            let mut local_faces: Vec<usize> = (0..trimesh.num_faces_at_vertex(i))
                .map(|j| trimesh.vertex_to_face(i, j).unwrap().into())
                .collect();
            local_faces.sort();
            assert_eq!(local_faces, vertex_faces[i]);
        }
    }
    /// Test converting from a `PolyMesh` into a `TriMeshExt`, which is a non-trivial operation since
    /// it involves trianguating polygons.
    #[test]
    fn from_polymesh_test() {
        let points = vec![
            [0.0, 0.0, 0.0],
            [1.0, 0.0, 0.0],
            [0.0, 1.0, 0.0],
            [1.0, 1.0, 0.0],
            [0.0, 0.0, 1.0],
            [1.0, 0.0, 1.0],
        ];
        let faces = vec![
            3, 0, 1, 2, // first triangle
            4, 0, 1, 5, 4, // quadrilateral
            3, 1, 3, 2, // second triangle
        ];
        let polymesh = crate::mesh::PolyMesh::new(points.clone(), &faces);
        let trimesh = TriMeshExt::new(
            points.clone(),
            vec![[0, 1, 2], [0, 1, 5], [0, 5, 4], [1, 3, 2]],
        );
        assert_eq!(trimesh, TriMeshExt::from(polymesh));
    }
}
| 34.841785 | 134 | 0.501601 |
1d92e8fc97c7aa5690389bc17a75f5f942f75ebb | 18,510 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Implementation of `std::os` functionality for unix systems
#![allow(unused_imports)] // lots of cfg code here
use os::unix::prelude::*;
use error::Error as StdError;
use ffi::{CString, CStr, OsString, OsStr};
use fmt;
use io;
use iter;
use libc::{self, c_int, c_char, c_void};
use marker::PhantomData;
use mem;
use memchr;
use path::{self, PathBuf};
use ptr;
use slice;
use str;
use sys_common::mutex::Mutex;
use sys::cvt;
use sys::fd;
use vec;
// Scratch-buffer size for `strerror_r` output in `error_string`.
const TMPBUF_SZ: usize = 128;
// Serializes access to the process environment (see `env`).
static ENV_LOCK: Mutex = Mutex::new();
extern {
    // Per-thread errno location; the symbol name differs per platform/libc,
    // hence the tower of `link_name` attributes. DragonFly is excluded
    // because it exposes errno as a thread-local static instead.
    #[cfg(not(target_os = "dragonfly"))]
    #[cfg_attr(any(target_os = "linux",
                   target_os = "emscripten",
                   target_os = "fuchsia",
                   target_os = "l4re"),
               link_name = "__errno_location")]
    #[cfg_attr(any(target_os = "bitrig",
                   target_os = "netbsd",
                   target_os = "openbsd",
                   target_os = "android",
                   target_os = "hermit",
                   target_env = "newlib"),
               link_name = "__errno")]
    #[cfg_attr(target_os = "solaris", link_name = "___errno")]
    #[cfg_attr(any(target_os = "macos",
                   target_os = "ios",
                   target_os = "freebsd"),
               link_name = "__error")]
    #[cfg_attr(target_os = "haiku", link_name = "_errnop")]
    fn errno_location() -> *mut c_int;
}
/// Returns the platform-specific value of errno
#[cfg(not(target_os = "dragonfly"))]
pub fn errno() -> i32 {
    // Read the calling thread's errno via its libc-provided location.
    let raw = unsafe { *errno_location() };
    raw as i32
}
/// Sets the platform-specific value of errno
#[cfg(any(target_os = "solaris", target_os = "fuchsia"))] // only needed for readdir so far
pub fn set_errno(e: i32) {
    let value = e as c_int;
    unsafe {
        *errno_location() = value;
    }
}
// DragonFly exposes errno directly as a thread-local static rather than
// through a location function.
#[cfg(target_os = "dragonfly")]
pub fn errno() -> i32 {
    extern {
        #[thread_local]
        static errno: c_int;
    }
    unsafe { errno as i32 }
}
/// Gets a detailed string description for the given error number.
pub fn error_string(errno: i32) -> String {
    extern {
        // Force the XSI (int-returning) strerror_r on glibc/newlib, which
        // otherwise export the GNU (char*-returning) variant.
        #[cfg_attr(any(target_os = "linux", target_env = "newlib"),
                   link_name = "__xpg_strerror_r")]
        fn strerror_r(errnum: c_int, buf: *mut c_char,
                      buflen: libc::size_t) -> c_int;
    }
    let mut buf = [0 as c_char; TMPBUF_SZ];
    let p = buf.as_mut_ptr();
    unsafe {
        // NOTE(review): POSIX allows strerror_r to report failure via a
        // *positive* error number return; only negative returns are caught
        // here — confirm against the libcs this file targets.
        if strerror_r(errno as c_int, p, buf.len()) < 0 {
            panic!("strerror_r failure");
        }
        // strerror_r NUL-terminates on success, so the CStr scan is bounded.
        let p = p as *const _;
        str::from_utf8(CStr::from_ptr(p).to_bytes()).unwrap().to_owned()
    }
}
// Current working directory, growing the buffer until libc::getcwd fits.
pub fn getcwd() -> io::Result<PathBuf> {
    let mut buf = Vec::with_capacity(512);
    loop {
        unsafe {
            let ptr = buf.as_mut_ptr() as *mut libc::c_char;
            if !libc::getcwd(ptr, buf.capacity()).is_null() {
                // Success: trim to the NUL-terminated length written by libc.
                let len = CStr::from_ptr(buf.as_ptr() as *const libc::c_char).to_bytes().len();
                buf.set_len(len);
                buf.shrink_to_fit();
                return Ok(PathBuf::from(OsString::from_vec(buf)));
            } else {
                // Only ERANGE (buffer too small) warrants a retry.
                let error = io::Error::last_os_error();
                if error.raw_os_error() != Some(libc::ERANGE) {
                    return Err(error);
                }
            }
            // Trigger the internal buffer resizing logic of `Vec` by requiring
            // more space than the current capacity.
            let cap = buf.capacity();
            buf.set_len(cap);
            buf.reserve(1);
        }
    }
}
pub fn chdir(p: &path::Path) -> io::Result<()> {
let p: &OsStr = p.as_ref();
let p = CString::new(p.as_bytes())?;
unsafe {
match libc::chdir(p.as_ptr()) == (0 as c_int) {
true => Ok(()),
false => Err(io::Error::last_os_error()),
}
}
}
// Iterator over the entries of a `:`-separated PATH-style value.
// Plain fn pointers (not closures) keep the iterator type nameable here.
pub struct SplitPaths<'a> {
    iter: iter::Map<slice::Split<'a, u8, fn(&u8) -> bool>,
                    fn(&'a [u8]) -> PathBuf>,
}
// Split a PATH-style value on `:` into `PathBuf` entries.
pub fn split_paths(unparsed: &OsStr) -> SplitPaths {
    // Helpers are plain fns so they coerce to the fn-pointer types that
    // `SplitPaths` stores.
    fn to_path(b: &[u8]) -> PathBuf {
        PathBuf::from(<OsStr as OsStrExt>::from_bytes(b))
    }
    fn is_separator(b: &u8) -> bool { *b == b':' }
    let bytes = unparsed.as_bytes();
    SplitPaths {
        iter: bytes.split(is_separator as fn(&u8) -> bool)
                   .map(to_path as fn(&[u8]) -> PathBuf)
    }
}
// Delegates straight to the wrapped split/map iterator.
impl<'a> Iterator for SplitPaths<'a> {
    type Item = PathBuf;
    fn next(&mut self) -> Option<PathBuf> { self.iter.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
// Error returned by `join_paths` when a segment contains the separator.
#[derive(Debug)]
pub struct JoinPathsError;
// Join path segments with `:`, failing if any segment itself contains `:`.
pub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>
    where I: Iterator<Item=T>, T: AsRef<OsStr>
{
    const SEP: u8 = b':';
    let mut joined = Vec::new();
    let mut first = true;
    for path in paths {
        let bytes = path.as_ref().as_bytes();
        // Separator precedes every segment but the first.
        if !first {
            joined.push(SEP);
        }
        first = false;
        // A segment containing the separator cannot round-trip.
        if bytes.contains(&SEP) {
            return Err(JoinPathsError);
        }
        joined.extend_from_slice(bytes);
    }
    Ok(OsStringExt::from_vec(joined))
}
impl fmt::Display for JoinPathsError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Route through `str`'s Display impl so width/fill formatting
        // flags keep working exactly as before.
        fmt::Display::fmt("path segment contains separator `:`", f)
    }
}
// Short, stable description for the std Error trait.
impl StdError for JoinPathsError {
    fn description(&self) -> &str { "failed to join paths" }
}
// FreeBSD/DragonFly: ask the kernel for the executable path via the
// KERN_PROC_PATHNAME sysctl (pid -1 means the calling process).
#[cfg(any(target_os = "freebsd", target_os = "dragonfly"))]
pub fn current_exe() -> io::Result<PathBuf> {
    unsafe {
        let mut mib = [libc::CTL_KERN as c_int,
                       libc::KERN_PROC as c_int,
                       libc::KERN_PROC_PATHNAME as c_int,
                       -1 as c_int];
        // First call with a null buffer just queries the required size.
        let mut sz = 0;
        cvt(libc::sysctl(mib.as_mut_ptr(), mib.len() as ::libc::c_uint,
                         ptr::null_mut(), &mut sz, ptr::null_mut(), 0))?;
        if sz == 0 {
            return Err(io::Error::last_os_error())
        }
        // Second call fills the sized buffer with the NUL-terminated path.
        let mut v: Vec<u8> = Vec::with_capacity(sz);
        cvt(libc::sysctl(mib.as_mut_ptr(), mib.len() as ::libc::c_uint,
                         v.as_mut_ptr() as *mut libc::c_void, &mut sz,
                         ptr::null_mut(), 0))?;
        if sz == 0 {
            return Err(io::Error::last_os_error());
        }
        v.set_len(sz - 1); // chop off trailing NUL
        Ok(PathBuf::from(OsString::from_vec(v)))
    }
}
// NetBSD: prefer the KERN_PROC_PATHNAME sysctl, falling back to the
// /proc/curproc/exe symlink when the sysctl is unavailable.
#[cfg(target_os = "netbsd")]
pub fn current_exe() -> io::Result<PathBuf> {
    fn sysctl() -> io::Result<PathBuf> {
        unsafe {
            let mib = [libc::CTL_KERN, libc::KERN_PROC_ARGS, -1, libc::KERN_PROC_PATHNAME];
            // Size query first, then fill the buffer (same two-step dance
            // as the FreeBSD implementation above).
            let mut path_len: usize = 0;
            cvt(libc::sysctl(mib.as_ptr(), mib.len() as ::libc::c_uint,
                             ptr::null_mut(), &mut path_len,
                             ptr::null(), 0))?;
            if path_len <= 1 {
                return Err(io::Error::new(io::ErrorKind::Other,
                           "KERN_PROC_PATHNAME sysctl returned zero-length string"))
            }
            let mut path: Vec<u8> = Vec::with_capacity(path_len);
            cvt(libc::sysctl(mib.as_ptr(), mib.len() as ::libc::c_uint,
                             path.as_ptr() as *mut libc::c_void, &mut path_len,
                             ptr::null(), 0))?;
            path.set_len(path_len - 1); // chop off NUL
            Ok(PathBuf::from(OsString::from_vec(path)))
        }
    }
    fn procfs() -> io::Result<PathBuf> {
        let curproc_exe = path::Path::new("/proc/curproc/exe");
        if curproc_exe.is_file() {
            return ::fs::read_link(curproc_exe);
        }
        Err(io::Error::new(io::ErrorKind::Other,
                           "/proc/curproc/exe doesn't point to regular file."))
    }
    sysctl().or_else(|_| procfs())
}
// OpenBSD/Bitrig: no pathname sysctl exists, so fetch the process argv via
// KERN_PROC_ARGV and resolve argv[0] — relative argv[0] values are
// canonicalized against the current directory.
#[cfg(any(target_os = "bitrig", target_os = "openbsd"))]
pub fn current_exe() -> io::Result<PathBuf> {
    unsafe {
        let mut mib = [libc::CTL_KERN,
                       libc::KERN_PROC_ARGS,
                       libc::getpid(),
                       libc::KERN_PROC_ARGV];
        let mib = mib.as_mut_ptr();
        // Query size, then fetch the argv pointer array.
        let mut argv_len = 0;
        cvt(libc::sysctl(mib, 4, ptr::null_mut(), &mut argv_len,
                         ptr::null_mut(), 0))?;
        let mut argv = Vec::<*const libc::c_char>::with_capacity(argv_len as usize);
        cvt(libc::sysctl(mib, 4, argv.as_mut_ptr() as *mut _,
                         &mut argv_len, ptr::null_mut(), 0))?;
        argv.set_len(argv_len as usize);
        if argv[0].is_null() {
            return Err(io::Error::new(io::ErrorKind::Other,
                                      "no current exe available"))
        }
        let argv0 = CStr::from_ptr(argv[0]).to_bytes();
        // argv[0] containing a slash (or starting with '.') is a filesystem
        // path; canonicalize it. A bare name is returned as-is.
        if argv0[0] == b'.' || argv0.iter().any(|b| *b == b'/') {
            ::fs::canonicalize(OsStr::from_bytes(argv0))
        } else {
            Ok(PathBuf::from(OsStr::from_bytes(argv0)))
        }
    }
}
#[cfg(any(target_os = "linux", target_os = "android", target_os = "emscripten"))]
pub fn current_exe() -> io::Result<PathBuf> {
let selfexe = PathBuf::from("/proc/self/exe");
if selfexe.exists() {
::fs::read_link(selfexe)
} else {
Err(io::Error::new(io::ErrorKind::Other, "no /proc/self/exe available. Is /proc mounted?"))
}
}
// macOS/iOS: _NSGetExecutablePath with the usual size-query-then-fill
// two-call pattern.
#[cfg(any(target_os = "macos", target_os = "ios"))]
pub fn current_exe() -> io::Result<PathBuf> {
    extern {
        fn _NSGetExecutablePath(buf: *mut libc::c_char,
                                bufsize: *mut u32) -> libc::c_int;
    }
    unsafe {
        // First call (null buffer) reports the needed size in `sz`.
        let mut sz: u32 = 0;
        _NSGetExecutablePath(ptr::null_mut(), &mut sz);
        if sz == 0 { return Err(io::Error::last_os_error()); }
        let mut v: Vec<u8> = Vec::with_capacity(sz as usize);
        let err = _NSGetExecutablePath(v.as_mut_ptr() as *mut i8, &mut sz);
        if err != 0 { return Err(io::Error::last_os_error()); }
        v.set_len(sz as usize - 1); // chop off trailing NUL
        Ok(PathBuf::from(OsString::from_vec(v)))
    }
}
// Solaris: getexecname() may return a path relative to the directory the
// process started in, so relative results are joined onto the cwd.
#[cfg(any(target_os = "solaris"))]
pub fn current_exe() -> io::Result<PathBuf> {
    extern {
        fn getexecname() -> *const c_char;
    }
    unsafe {
        let path = getexecname();
        if path.is_null() {
            Err(io::Error::last_os_error())
        } else {
            let filename = CStr::from_ptr(path).to_bytes();
            let path = PathBuf::from(<OsStr as OsStrExt>::from_bytes(filename));
            // Prepend a current working directory to the path if
            // it doesn't contain an absolute pathname.
            if filename[0] == b'/' {
                Ok(path)
            } else {
                getcwd().map(|cwd| cwd.join(path))
            }
        }
    }
}
// Haiku: walk the team's image list; the first image (cookie 0, team 0 ==
// current team) is the executable itself.
#[cfg(target_os = "haiku")]
pub fn current_exe() -> io::Result<PathBuf> {
    // Use Haiku's image info functions
    // Mirrors Haiku's `image_info` C struct layout; only `name` is used.
    #[repr(C)]
    struct image_info {
        id: i32,
        type_: i32,
        sequence: i32,
        init_order: i32,
        init_routine: *mut libc::c_void, // function pointer
        term_routine: *mut libc::c_void, // function pointer
        device: libc::dev_t,
        node: libc::ino_t,
        name: [libc::c_char; 1024], // MAXPATHLEN
        text: *mut libc::c_void,
        data: *mut libc::c_void,
        text_size: i32,
        data_size: i32,
        api_version: i32,
        abi: i32,
    }
    unsafe {
        extern {
            fn _get_next_image_info(team_id: i32, cookie: *mut i32,
                                    info: *mut image_info, size: i32) -> i32;
        }
        let mut info: image_info = mem::zeroed();
        let mut cookie: i32 = 0;
        // the executable can be found at team id 0
        let result = _get_next_image_info(0, &mut cookie, &mut info,
                                          mem::size_of::<image_info>() as i32);
        if result != 0 {
            use io::ErrorKind;
            Err(io::Error::new(ErrorKind::Other, "Error getting executable path"))
        } else {
            let name = CStr::from_ptr(info.name.as_ptr()).to_bytes();
            Ok(PathBuf::from(OsStr::from_bytes(name)))
        }
    }
}
// Stub for platforms with no implementation yet.
#[cfg(any(target_os = "fuchsia", target_os = "l4re", target_os = "hermit"))]
pub fn current_exe() -> io::Result<PathBuf> {
    Err(io::Error::new(io::ErrorKind::Other, "Not yet implemented!"))
}
/// Iterator over the process environment, yielding `(name, value)` pairs.
pub struct Env {
    // Snapshot of the environment taken while ENV_LOCK was held.
    iter: vec::IntoIter<(OsString, OsString)>,
    // Raw-pointer marker opts this type out of Send/Sync.
    _dont_send_or_sync_me: PhantomData<*mut ()>,
}
impl Iterator for Env {
    type Item = (OsString, OsString);
    // Both methods simply delegate to the underlying snapshot iterator.
    fn next(&mut self) -> Option<(OsString, OsString)> { self.iter.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[cfg(target_os = "macos")]
pub unsafe fn environ() -> *mut *const *const c_char {
    // macOS requires going through _NSGetEnviron(); taking the address of
    // `environ` directly does not work from dynamic libraries there.
    extern { fn _NSGetEnviron() -> *mut *const *const c_char; }
    _NSGetEnviron()
}
#[cfg(not(target_os = "macos"))]
pub unsafe fn environ() -> *mut *const *const c_char {
    // Address of the C runtime's global `environ` array.
    // Callers must synchronize via ENV_LOCK before dereferencing.
    extern { static mut environ: *const *const c_char; }
    &mut environ
}
/// Returns a vector of (variable, value) byte-vector pairs for all the
/// environment variables of the current process.
pub fn env() -> Env {
    unsafe {
        // Hold the env lock for the whole traversal so a concurrent
        // setenv/unsetenv cannot invalidate the pointers being read.
        let _guard = ENV_LOCK.lock();
        let mut environ = *environ();
        if environ == ptr::null() {
            panic!("os::env() failure getting env string from OS: {}",
                   io::Error::last_os_error());
        }
        let mut result = Vec::new();
        // `environ` is a NULL-terminated array of NUL-terminated C strings.
        while *environ != ptr::null() {
            if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) {
                result.push(key_value);
            }
            environ = environ.offset(1);
        }
        return Env {
            iter: result.into_iter(),
            _dont_send_or_sync_me: PhantomData,
        }
    }

    // Split one `NAME=value` entry into its two halves.
    fn parse(input: &[u8]) -> Option<(OsString, OsString)> {
        // Strategy (copied from glibc): Variable name and value are separated
        // by an ASCII equals sign '='. Since a variable name must not be
        // empty, allow variable names starting with an equals sign. Skip all
        // malformed lines.
        if input.is_empty() {
            return None;
        }
        // Search from index 1 so a leading '=' is treated as part of the name.
        let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1);
        pos.map(|p| (
            OsStringExt::from_vec(input[..p].to_vec()),
            OsStringExt::from_vec(input[p+1..].to_vec()),
        ))
    }
}
/// Looks up a single environment variable, returning `Ok(None)` when unset.
pub fn getenv(k: &OsStr) -> io::Result<Option<OsString>> {
    // environment variables with a nul byte can't be set, so their value is
    // always None as well
    let k = CString::new(k.as_bytes())?;
    unsafe {
        // Lock so the returned C string cannot be freed/overwritten by a
        // concurrent setenv before we copy it out.
        let _guard = ENV_LOCK.lock();
        let s = libc::getenv(k.as_ptr()) as *const libc::c_char;
        let ret = if s.is_null() {
            None
        } else {
            // Copy immediately; the pointer is only valid while locked.
            Some(OsStringExt::from_vec(CStr::from_ptr(s).to_bytes().to_vec()))
        };
        Ok(ret)
    }
}
/// Sets (or overwrites — note the `1` overwrite flag) an environment variable.
pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> {
    // CString::new fails on interior NUL bytes, rejecting invalid names/values.
    let k = CString::new(k.as_bytes())?;
    let v = CString::new(v.as_bytes())?;
    unsafe {
        let _guard = ENV_LOCK.lock();
        cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(|_| ())
    }
}
/// Removes an environment variable; succeeds even if it was not set.
pub fn unsetenv(n: &OsStr) -> io::Result<()> {
    let nbuf = CString::new(n.as_bytes())?;
    unsafe {
        let _guard = ENV_LOCK.lock();
        cvt(libc::unsetenv(nbuf.as_ptr())).map(|_| ())
    }
}
/// Returns the system memory page size in bytes, as reported by sysconf.
pub fn page_size() -> usize {
    unsafe {
        libc::sysconf(libc::_SC_PAGESIZE) as usize
    }
}
/// Returns the preferred temporary-file directory: `$TMPDIR` when set,
/// otherwise a platform-specific default (`/data/local/tmp` on Android,
/// `/tmp` elsewhere).
pub fn temp_dir() -> PathBuf {
    if let Some(dir) = ::env::var_os("TMPDIR") {
        return PathBuf::from(dir);
    }
    let fallback = if cfg!(target_os = "android") {
        "/data/local/tmp"
    } else {
        "/tmp"
    };
    PathBuf::from(fallback)
}
/// Returns the current user's home directory: `$HOME` when set, otherwise
/// the passwd database entry (where available).
pub fn home_dir() -> Option<PathBuf> {
    return ::env::var_os("HOME").or_else(|| unsafe {
        fallback()
    }).map(PathBuf::from);

    // Platforms without a meaningful passwd database: no fallback.
    #[cfg(any(target_os = "android",
              target_os = "ios",
              target_os = "emscripten"))]
    unsafe fn fallback() -> Option<OsString> { None }
    #[cfg(not(any(target_os = "android",
                  target_os = "ios",
                  target_os = "emscripten")))]
    unsafe fn fallback() -> Option<OsString> {
        // Sysconf suggests a buffer size for getpwuid_r; a negative result
        // means "no suggestion", so fall back to 512 bytes.
        let amt = match libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) {
            n if n < 0 => 512 as usize,
            n => n as usize,
        };
        let mut buf = Vec::with_capacity(amt);
        let mut passwd: libc::passwd = mem::zeroed();
        let mut result = ptr::null_mut();
        // NOTE(review): if the entry is larger than `amt`, getpwuid_r fails
        // with ERANGE and this returns None rather than retrying with a
        // bigger buffer — TODO confirm that is acceptable here.
        match libc::getpwuid_r(libc::getuid(), &mut passwd, buf.as_mut_ptr(),
                               buf.capacity(), &mut result) {
            0 if !result.is_null() => {
                // pw_dir points into `buf`; copy it out before buf is dropped.
                let ptr = passwd.pw_dir as *const _;
                let bytes = CStr::from_ptr(ptr).to_bytes().to_vec();
                Some(OsStringExt::from_vec(bytes))
            },
            _ => None,
        }
    }
}
/// Terminates the process immediately with the given exit code (libc exit;
/// runs C atexit handlers, does not unwind Rust destructors on the stack).
pub fn exit(code: i32) -> ! {
    unsafe { libc::exit(code as c_int) }
}
/// Returns the current process id.
pub fn getpid() -> u32 {
    unsafe { libc::getpid() as u32 }
}
/// Returns the parent process id.
pub fn getppid() -> u32 {
    unsafe { libc::getppid() as u32 }
}
/// Returns the runtime glibc version as `(major, minor)`, or `None` when it
/// cannot be determined (symbol missing or unparseable version string).
#[cfg(target_env = "gnu")]
pub fn glibc_version() -> Option<(usize, usize)> {
    if let Some(Ok(version_str)) = glibc_version_cstr().map(CStr::to_str) {
        parse_glibc_version(version_str)
    } else {
        None
    }
}
/// Fetches the raw glibc version string via a weakly-resolved
/// `gnu_get_libc_version`, so binaries still load on non-glibc systems.
#[cfg(target_env = "gnu")]
fn glibc_version_cstr() -> Option<&'static CStr> {
    weak! {
        fn gnu_get_libc_version() -> *const libc::c_char
    }
    if let Some(f) = gnu_get_libc_version.get() {
        // The string returned by glibc is static, hence the 'static borrow.
        unsafe { Some(CStr::from_ptr(f())) }
    } else {
        None
    }
}
// Returns Some((major, minor)) if the string is a valid "x.y" version,
// ignoring any extra dot-separated parts. Otherwise return None.
#[cfg(target_env = "gnu")]
fn parse_glibc_version(version: &str) -> Option<(usize, usize)> {
    // Only the first two dot-separated components matter; both must parse.
    let mut pieces = version.split('.').map(str::parse::<usize>).fuse();
    let major = pieces.next()?.ok()?;
    let minor = pieces.next()?.ok()?;
    Some((major, minor))
}
| 32.247387 | 99 | 0.544192 |
11fa6cbe849de58c19ad3a97546d04cec08b4cab | 22,290 | // Copyright 2016 Mozilla Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::compiler::{Cacheable, ColorMode, Compiler, CompilerArguments, CompileCommand, CompilerHasher, CompilerKind,
Compilation, HashResult};
#[cfg(feature = "dist-client")]
use crate::compiler::{NoopOutputsRewriter, OutputsRewriter};
use crate::dist;
#[cfg(feature = "dist-client")]
use crate::dist::pkg;
use futures::Future;
use futures_cpupool::CpuPool;
use crate::mock_command::CommandCreatorSync;
use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
use std::ffi::{OsStr, OsString};
use std::fmt;
use std::fs;
use std::hash::Hash;
#[cfg(feature = "dist-client")]
use std::io;
use std::path::{Path, PathBuf};
use std::process;
use crate::util::{HashToDigest, Digest, hash_all};
use crate::errors::*;
/// A generic implementation of the `Compiler` trait for C/C++ compilers.
#[derive(Clone)]
pub struct CCompiler<I>
    where I: CCompilerImpl,
{
    // Path to the compiler binary.
    executable: PathBuf,
    // Digest of the binary's contents, computed once at construction.
    executable_digest: String,
    // Compiler-family-specific behavior (GCC/Clang/MSVC/Diab).
    compiler: I,
}
/// A generic implementation of the `CompilerHasher` trait for C/C++ compilers.
#[derive(Debug, Clone)]
pub struct CCompilerHasher<I>
    where I: CCompilerImpl,
{
    // Result of parsing the compile command line.
    parsed_args: ParsedArguments,
    // Path to the compiler binary.
    executable: PathBuf,
    // Digest of the binary's contents; part of the cache key.
    executable_digest: String,
    compiler: I,
}
/// Source language of a translation unit, as inferred from its extension.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Language {
    C,
    Cxx,
    ObjectiveC,
    ObjectiveCxx,
}
/// The results of parsing a compiler commandline.
#[allow(dead_code)]
#[derive(Debug, PartialEq, Clone)]
pub struct ParsedArguments {
    /// The input source file.
    // NOTE(review): presumably relative to the invocation cwd — confirm
    // against the per-compiler parse_arguments implementations.
    pub input: PathBuf,
    /// The type of language used in the input source file.
    pub language: Language,
    /// The file in which to generate dependencies.
    pub depfile: Option<PathBuf>,
    /// Output files, keyed by a simple name, like "obj".
    pub outputs: HashMap<&'static str, PathBuf>,
    /// Commandline arguments for the preprocessor.
    pub preprocessor_args: Vec<OsString>,
    /// Commandline arguments for the preprocessor or the compiler.
    pub common_args: Vec<OsString>,
    /// Extra files that need to have their contents hashed.
    pub extra_hash_files: Vec<PathBuf>,
    /// Whether or not the `-showIncludes` argument is passed on MSVC
    pub msvc_show_includes: bool,
    /// Whether the compilation is generating profiling or coverage data.
    pub profile_generate: bool,
}
impl ParsedArguments {
    /// Short, human-readable name for this compilation: the file name of the
    /// "obj" output, or a placeholder when it is unavailable.
    pub fn output_pretty(&self) -> Cow<'_, str> {
        match self.outputs.get("obj").and_then(|path| path.file_name()) {
            Some(name) => name.to_string_lossy(),
            None => Cow::Borrowed("Unknown filename"),
        }
    }
}
impl Language {
    /// Infer the source language from a file's extension, logging and
    /// returning `None` for unrecognized extensions.
    pub fn from_file_name(file: &Path) -> Option<Self> {
        let extension = file.extension().and_then(|e| e.to_str());
        match extension {
            Some("c") => Some(Language::C),
            Some("cc") | Some("cpp") | Some("cxx") => Some(Language::Cxx),
            Some("m") => Some(Language::ObjectiveC),
            Some("mm") => Some(Language::ObjectiveCxx),
            other => {
                trace!("Unknown source extension: {}", other.unwrap_or("(None)"));
                None
            }
        }
    }

    /// The language name as passed to the compiler (e.g. via `-x`).
    pub fn as_str(&self) -> &'static str {
        match *self {
            Language::C => "c",
            Language::Cxx => "c++",
            Language::ObjectiveC => "objc",
            Language::ObjectiveCxx => "objc++",
        }
    }
}
/// A generic implementation of the `Compilation` trait for C/C++ compilers.
struct CCompilation<I: CCompilerImpl> {
    parsed_args: ParsedArguments,
    // Preprocessed source, kept only for distributed compilation where it is
    // shipped to the remote worker.
    #[cfg(feature = "dist-client")]
    preprocessed_input: Vec<u8>,
    executable: PathBuf,
    compiler: I,
    // Working directory and environment of the original invocation.
    cwd: PathBuf,
    env_vars: Vec<(OsString, OsString)>,
}
/// Supported C compilers.
#[derive(Debug, PartialEq, Clone)]
pub enum CCompilerKind {
    /// GCC
    GCC,
    /// clang
    Clang,
    /// Diab
    Diab,
    /// Microsoft Visual C++
    MSVC,
}
/// An interface to a specific C compiler.
///
/// Implementors provide the compiler-family-specific pieces (argument
/// parsing, preprocessing, and compile-command generation) that the generic
/// `CCompiler`/`CCompilation` machinery drives.
pub trait CCompilerImpl: Clone + fmt::Debug + Send + 'static {
    /// Return the kind of compiler.
    fn kind(&self) -> CCompilerKind;
    /// Determine whether `arguments` are supported by this compiler.
    fn parse_arguments(&self,
                       arguments: &[OsString],
                       cwd: &Path) -> CompilerArguments<ParsedArguments>;
    /// Run the C preprocessor with the specified set of arguments.
    // `may_dist` signals whether the output may be shipped to a remote
    // worker, which can affect how the preprocessor is invoked.
    fn preprocess<T>(&self,
                     creator: &T,
                     executable: &Path,
                     parsed_args: &ParsedArguments,
                     cwd: &Path,
                     env_vars: &[(OsString, OsString)],
                     may_dist: bool)
                     -> SFuture<process::Output> where T: CommandCreatorSync;
    /// Generate a command that can be used to invoke the C compiler to perform
    /// the compilation.
    // Returns the local command, an optional distributed equivalent, and
    // whether the result is cacheable.
    fn generate_compile_commands(&self,
                                 path_transformer: &mut dist::PathTransformer,
                                 executable: &Path,
                                 parsed_args: &ParsedArguments,
                                 cwd: &Path,
                                 env_vars: &[(OsString, OsString)])
                                 -> Result<(CompileCommand, Option<dist::CompileCommand>, Cacheable)>;
}
impl <I> CCompiler<I>
    where I: CCompilerImpl,
{
    /// Builds a `CCompiler`, hashing the compiler binary on `pool` so the
    /// digest can later participate in cache keys.
    pub fn new(compiler: I, executable: PathBuf, pool: &CpuPool) -> SFuture<CCompiler<I>>
    {
        Box::new(Digest::file(executable.clone(), &pool).map(move |digest| {
            CCompiler {
                executable: executable,
                executable_digest: digest,
                compiler: compiler,
            }
        }))
    }
}
impl<T: CommandCreatorSync, I: CCompilerImpl> Compiler<T> for CCompiler<I> {
    fn kind(&self) -> CompilerKind { CompilerKind::C(self.compiler.kind()) }

    #[cfg(feature = "dist-client")]
    fn get_toolchain_packager(&self) -> Box<dyn pkg::ToolchainPackager> {
        Box::new(CToolchainPackager {
            executable: self.executable.clone(),
            kind: self.compiler.kind(),
        })
    }

    // Delegates parsing to the compiler-specific implementation and, on
    // success, wraps the result in a hasher carrying everything needed to
    // compute a cache key later.
    fn parse_arguments(&self,
                       arguments: &[OsString],
                       cwd: &Path) -> CompilerArguments<Box<dyn CompilerHasher<T> + 'static>> {
        match self.compiler.parse_arguments(arguments, cwd) {
            CompilerArguments::Ok(args) => {
                CompilerArguments::Ok(Box::new(CCompilerHasher {
                    parsed_args: args,
                    executable: self.executable.clone(),
                    executable_digest: self.executable_digest.clone(),
                    compiler: self.compiler.clone(),
                }))
            }
            CompilerArguments::CannotCache(why, extra_info) => CompilerArguments::CannotCache(why, extra_info),
            CompilerArguments::NotCompilation => CompilerArguments::NotCompilation,
        }
    }

    fn box_clone(&self) -> Box<dyn Compiler<T>> {
        Box::new((*self).clone())
    }
}
impl<T, I> CompilerHasher<T> for CCompilerHasher<I>
    where T: CommandCreatorSync,
          I: CCompilerImpl,
{
    // Runs the preprocessor and folds its output (plus the compiler digest,
    // arguments, extra files and selected env vars) into a cache key.
    // On preprocessor failure, partially-written outputs are deleted so a
    // later cache probe cannot observe stale artifacts.
    fn generate_hash_key(self: Box<Self>,
                         creator: &T,
                         cwd: PathBuf,
                         env_vars: Vec<(OsString, OsString)>,
                         may_dist: bool,
                         pool: &CpuPool)
                         -> SFuture<HashResult>
    {
        let me = *self;
        let CCompilerHasher { parsed_args, executable, executable_digest, compiler } = me;
        let result = compiler.preprocess(creator, &executable, &parsed_args, &cwd, &env_vars, may_dist);
        let out_pretty = parsed_args.output_pretty().into_owned();
        // NOTE(review): `to_vec` on an owned Vec is a full copy — presumably
        // needed because `env_vars` is moved into a later closure; confirm.
        let env_vars = env_vars.to_vec();
        let result = result.map_err(move |e| {
            debug!("[{}]: preprocessor failed: {:?}", out_pretty, e);
            e
        });
        // Re-bind: the previous `out_pretty` was moved into the closure above.
        let out_pretty = parsed_args.output_pretty().into_owned();
        let extra_hashes = hash_all(&parsed_args.extra_hash_files, &pool.clone());
        let outputs = parsed_args.outputs.clone();
        let args_cwd = cwd.clone();
        Box::new(result.or_else(move |err| {
            // Errors remove all traces of potential output.
            debug!("removing files {:?}", &outputs);
            // Fold over all outputs, stopping at the first removal error.
            let v: std::result::Result<(), std::io::Error> = outputs.values().fold(Ok(()), |r, f| {
                r.and_then(|_| {
                    let mut path = (&args_cwd).clone();
                    path.push(&f);
                    match fs::metadata(&path) {
                        // File exists, remove it.
                        Ok(_) => fs::remove_file(&path),
                        _ => Ok(()),
                    }
                })
            });
            if v.is_err() {
                warn!("Could not remove files after preprocessing failed!\n");
            }
            match err {
                Error(ErrorKind::ProcessError(output), _) => {
                    debug!("[{}]: preprocessor returned error status {:?}",
                           out_pretty,
                           output.status.code());
                    // Drop the stdout since it's the preprocessor output, just hand back stderr and
                    // the exit status.
                    bail!(ErrorKind::ProcessError(process::Output {
                        stdout: vec!(),
                        .. output
                    }))
                }
                e @ _ => Err(e),
            }
        }).and_then(move |preprocessor_result| {
            trace!("[{}]: Preprocessor output is {} bytes",
                   parsed_args.output_pretty(),
                   preprocessor_result.stdout.len());
            Box::new(extra_hashes.and_then(move |extra_hashes| {
                let key = {
                    hash_key(&executable_digest,
                             parsed_args.language,
                             &parsed_args.common_args,
                             &extra_hashes,
                             &env_vars,
                             &preprocessor_result.stdout)
                };
                // A compiler binary may be a symlink to another and so has the same digest, but that means
                // the toolchain will not contain the correct path to invoke the compiler! Add the compiler
                // executable path to try and prevent this
                let weak_toolchain_key = format!("{}-{}", executable.to_string_lossy(), executable_digest);
                Ok(HashResult {
                    key: key,
                    compilation: Box::new(CCompilation {
                        parsed_args: parsed_args,
                        #[cfg(feature = "dist-client")]
                        preprocessed_input: preprocessor_result.stdout,
                        executable: executable,
                        compiler: compiler,
                        cwd,
                        env_vars,
                    }),
                    weak_toolchain_key,
                })
            }))
        }))
    }

    fn color_mode(&self) -> ColorMode {
        //TODO: actually implement this for C compilers
        ColorMode::Auto
    }

    fn output_pretty(&self) -> Cow<'_, str>
    {
        self.parsed_args.output_pretty()
    }

    fn box_clone(&self) -> Box<dyn CompilerHasher<T>>
    {
        Box::new((*self).clone())
    }
}
impl<I: CCompilerImpl> Compilation for CCompilation<I> {
    // Delegates compile-command generation to the compiler-specific impl.
    fn generate_compile_commands(&self, path_transformer: &mut dist::PathTransformer)
                                 -> Result<(CompileCommand, Option<dist::CompileCommand>, Cacheable)>
    {
        let CCompilation { ref parsed_args, ref executable, ref compiler, ref cwd, ref env_vars, .. } = *self;
        compiler.generate_compile_commands(path_transformer, executable, parsed_args, cwd, env_vars)
    }

    // Splits this compilation into the three packagers a distributed build
    // needs: inputs (preprocessed source), toolchain, and output rewriting
    // (a no-op for C, since outputs need no path fixups).
    #[cfg(feature = "dist-client")]
    fn into_dist_packagers(self: Box<Self>, path_transformer: dist::PathTransformer) -> Result<(Box<dyn pkg::InputsPackager>, Box<dyn pkg::ToolchainPackager>, Box<dyn OutputsRewriter>)> {
        let CCompilation { parsed_args, cwd, preprocessed_input, executable, compiler, .. } = *{self};
        trace!("Dist inputs: {:?}", parsed_args.input);

        let input_path = cwd.join(&parsed_args.input);
        let inputs_packager = Box::new(CInputsPackager { input_path, preprocessed_input, path_transformer });
        let toolchain_packager = Box::new(CToolchainPackager { executable, kind: compiler.kind() });
        let outputs_rewriter = Box::new(NoopOutputsRewriter);
        Ok((inputs_packager, toolchain_packager, outputs_rewriter))
    }

    fn outputs<'a>(&'a self) -> Box<dyn Iterator<Item=(&'a str, &'a Path)> + 'a>
    {
        Box::new(self.parsed_args.outputs.iter().map(|(k, v)| (*k, &**v)))
    }
}
/// Packages a compilation's preprocessed source for a distributed worker.
#[cfg(feature = "dist-client")]
struct CInputsPackager {
    // Absolute path of the original input (used for tar metadata only).
    input_path: PathBuf,
    path_transformer: dist::PathTransformer,
    // The actual bytes shipped: preprocessor output, not the raw source.
    preprocessed_input: Vec<u8>,
}
#[cfg(feature = "dist-client")]
impl pkg::InputsPackager for CInputsPackager {
    // Writes a single-entry tar archive containing the preprocessed source
    // under the dist-transformed path of the original input.
    fn write_inputs(self: Box<Self>, wtr: &mut dyn io::Write) -> Result<dist::PathTransformer> {
        let CInputsPackager { input_path, mut path_transformer, preprocessed_input } = *{self};

        let input_path = pkg::simplify_path(&input_path)?;
        let dist_input_path = path_transformer.to_dist(&input_path)
            .chain_err(|| format!("unable to transform input path {}", input_path.display()))?;

        let mut builder = tar::Builder::new(wtr);

        let mut file_header = pkg::make_tar_header(&input_path, &dist_input_path)?;
        // Size must be overridden: the header was built from the on-disk
        // (non-preprocessed) file's metadata.
        file_header.set_size(preprocessed_input.len() as u64); // The metadata is from non-preprocessed
        file_header.set_cksum();
        builder.append(&file_header, preprocessed_input.as_slice())?;

        // Finish archive
        let _ = builder.into_inner();
        Ok(path_transformer)
    }
}
/// Packages the compiler binary and its support files for distribution.
#[cfg(feature = "dist-client")]
#[allow(unused)]
struct CToolchainPackager {
    executable: PathBuf,
    // Which compiler family this is, so family-specific files can be added.
    kind: CCompilerKind,
}
#[cfg(feature = "dist-client")]
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
impl pkg::ToolchainPackager for CToolchainPackager {
    // Builds a compressed tar of the compiler executable, its dynamic
    // dependencies, and family-specific support programs/files, suitable for
    // running the compiler on a remote Linux x86_64 worker.
    fn write_pkg(self: Box<Self>, f: fs::File) -> Result<()> {
        use std::os::unix::ffi::OsStringExt;

        info!("Generating toolchain {}", self.executable.display());
        let mut package_builder = pkg::ToolchainPackageBuilder::new();
        package_builder.add_common()?;
        package_builder.add_executable_and_deps(self.executable.clone())?;

        // Helper to use -print-file-name and -print-prog-name to look up
        // files by path.
        // Returns None unless the compiler reports an absolute path.
        let named_file = |kind: &str, name: &str| -> Option<PathBuf> {
            let mut output = process::Command::new(&self.executable)
                .arg(&format!("-print-{}-name={}", kind, name))
                .output()
                .ok()?;
            debug!(
                "find named {} {} output:\n{}\n===\n{}",
                kind,
                name,
                String::from_utf8_lossy(&output.stdout),
                String::from_utf8_lossy(&output.stderr),
            );
            if !output.status.success() {
                debug!("exit failure");
                return None;
            }

            // Remove the trailing newline (if present)
            if output.stdout.last() == Some(&b'\n') {
                output.stdout.pop();
            }

            // Create our PathBuf from the raw bytes, and return if absolute.
            let path: PathBuf = OsString::from_vec(output.stdout).into();
            if path.is_absolute() {
                Some(path)
            } else {
                None
            }
        };

        // Helper to add a named file/program by to the package.
        // We ignore the case where the file doesn't exist, as we don't need it.
        let add_named_prog = |builder: &mut pkg::ToolchainPackageBuilder, name: &str| -> Result<()> {
            if let Some(path) = named_file("prog", name) {
                builder.add_executable_and_deps(path)?;
            }
            Ok(())
        };
        let add_named_file = |builder: &mut pkg::ToolchainPackageBuilder, name: &str| -> Result<()> {
            if let Some(path) = named_file("file", name) {
                builder.add_file(path)?;
            }
            Ok(())
        };

        // Add basic |as| and |objcopy| programs.
        add_named_prog(&mut package_builder, "as")?;
        add_named_prog(&mut package_builder, "objcopy")?;

        // Linker configuration.
        if Path::new("/etc/ld.so.conf").is_file() {
            package_builder.add_file("/etc/ld.so.conf".into())?;
        }

        // Compiler-specific handling
        match self.kind {
            CCompilerKind::Clang => {
                // Clang uses internal header files, so add them.
                if let Some(limits_h) = named_file("file", "include/limits.h") {
                    info!("limits_h = {}", limits_h.display());
                    package_builder.add_dir_contents(limits_h.parent().unwrap())?;
                }
            }

            CCompilerKind::GCC => {
                // Various external programs / files which may be needed by gcc
                add_named_prog(&mut package_builder, "cc1")?;
                add_named_prog(&mut package_builder, "cc1plus")?;
                add_named_file(&mut package_builder, "specs")?;
                add_named_file(&mut package_builder, "liblto_plugin.so")?;
            }

            // Other kinds (MSVC, Diab) are not packaged on Linux workers.
            _ => unreachable!(),
        }

        // Bundle into a compressed tarfile.
        package_builder.into_compressed_tar(f)
    }
}
/// The cache is versioned by the inputs to `hash_key`.
// Bump this whenever the set or interpretation of hash inputs changes, so
// stale cache entries are never reused.
pub const CACHE_VERSION: &[u8] = b"7";

lazy_static! {
    /// Environment variables that are factored into the cache key.
    static ref CACHED_ENV_VARS: HashSet<&'static OsStr> = [
        "MACOSX_DEPLOYMENT_TARGET",
        "IPHONEOS_DEPLOYMENT_TARGET",
    ].iter().map(OsStr::new).collect();
}
/// Compute the hash key of `compiler` compiling `preprocessor_output` with `args`.
///
/// Inputs folded into the digest, in order: compiler binary digest, cache
/// version, language, compiler arguments, extra file hashes, whitelisted
/// environment variables, and the preprocessed source itself.
pub fn hash_key(compiler_digest: &str,
                language: Language,
                arguments: &[OsString],
                extra_hashes: &[String],
                env_vars: &[(OsString, OsString)],
                preprocessor_output: &[u8]) -> String
{
    // If you change any of the inputs to the hash, you should change `CACHE_VERSION`.
    let mut m = Digest::new();
    m.update(compiler_digest.as_bytes());
    m.update(CACHE_VERSION);
    m.update(language.as_str().as_bytes());
    for arg in arguments {
        arg.hash(&mut HashToDigest { digest: &mut m });
    }
    for hash in extra_hashes {
        m.update(hash.as_bytes());
    }

    // Only the small whitelist in CACHED_ENV_VARS affects the key; arbitrary
    // environment differences must not cause cache misses.
    for &(ref var, ref val) in env_vars.iter() {
        if CACHED_ENV_VARS.contains(var.as_os_str()) {
            var.hash(&mut HashToDigest { digest: &mut m });
            m.update(&b"="[..]);
            val.hash(&mut HashToDigest { digest: &mut m });
        }
    }
    m.update(preprocessor_output);
    m.finish()
}
#[cfg(test)]
mod test {
    use super::*;

    // Different compiler digests must produce different keys.
    #[test]
    fn test_hash_key_executable_contents_differs() {
        let args = ovec!["a", "b", "c"];
        const PREPROCESSED : &'static [u8] = b"hello world";
        assert_neq!(hash_key("abcd", Language::C, &args, &[], &[], &PREPROCESSED),
                    hash_key("wxyz", Language::C, &args, &[], &[], &PREPROCESSED));
    }

    // Any change in the argument list (content or length) must change the key.
    #[test]
    fn test_hash_key_args_differs() {
        let digest = "abcd";
        let abc = ovec!["a", "b", "c"];
        let xyz = ovec!["x", "y", "z"];
        let ab = ovec!["a", "b"];
        let a = ovec!["a"];
        const PREPROCESSED: &'static [u8] = b"hello world";
        assert_neq!(hash_key(digest, Language::C, &abc, &[], &[], &PREPROCESSED),
                    hash_key(digest, Language::C, &xyz, &[], &[], &PREPROCESSED));

        assert_neq!(hash_key(digest, Language::C, &abc, &[], &[], &PREPROCESSED),
                    hash_key(digest, Language::C, &ab, &[], &[], &PREPROCESSED));

        assert_neq!(hash_key(digest, Language::C, &abc, &[], &[], &PREPROCESSED),
                    hash_key(digest, Language::C, &a, &[], &[], &PREPROCESSED));
    }

    // Different preprocessed source must change the key.
    #[test]
    fn test_hash_key_preprocessed_content_differs() {
        let args = ovec!["a", "b", "c"];
        assert_neq!(hash_key("abcd", Language::C, &args, &[], &[], &b"hello world"[..]),
                    hash_key("abcd", Language::C, &args, &[], &[], &b"goodbye"[..]));
    }

    // Each whitelisted env var must affect the key, both presence and value.
    #[test]
    fn test_hash_key_env_var_differs() {
        let args = ovec!["a", "b", "c"];
        let digest = "abcd";
        const PREPROCESSED: &'static [u8] = b"hello world";
        for var in CACHED_ENV_VARS.iter() {
            let h1 = hash_key(digest, Language::C, &args, &[], &[], &PREPROCESSED);
            let vars = vec![(OsString::from(var), OsString::from("something"))];
            let h2 = hash_key(digest, Language::C, &args, &[], &vars, &PREPROCESSED);
            let vars = vec![(OsString::from(var), OsString::from("something else"))];
            let h3 = hash_key(digest, Language::C, &args, &[], &vars, &PREPROCESSED);
            assert_neq!(h1, h2);
            assert_neq!(h2, h3);
        }
    }

    // Extra hashed files must affect the key.
    #[test]
    fn test_extra_hash_data() {
        let args = ovec!["a", "b", "c"];
        let digest = "abcd";
        const PREPROCESSED: &'static [u8] = b"hello world";
        let extra_data = stringvec!["hello", "world"];

        assert_neq!(hash_key(digest, Language::C, &args, &extra_data, &[], &PREPROCESSED),
                    hash_key(digest, Language::C, &args, &[], &[], &PREPROCESSED));
    }
}
| 37.462185 | 187 | 0.561507 |
bb66006de58b8ab993ff71443f42282ef7a091e3 | 2,633 | use crate::{Point, MEAN_EARTH_RADIUS};
use num_traits::{Float, FromPrimitive};
/// Returns a new Point using the distance to the existing Point and a bearing for the direction
pub trait HaversineDestination<T: Float> {
    /// Returns a new Point using distance to the existing Point and a bearing for the direction
    ///
    /// # Units
    ///
    /// - `bearing`: degrees
    /// - `distance`: meters
    ///
    /// # Examples
    ///
    /// ```
    /// use geo::Point;
    /// use geo::algorithm::haversine_destination::HaversineDestination;
    ///
    /// let p_1 = Point::<f64>::new(9.177789688110352, 48.776781529534965);
    /// let p_2 = p_1.haversine_destination(45., 10000.);
    /// assert_eq!(p_2, Point::<f64>::new(9.274410083250379, 48.84033282787534))
    /// ```
    // NOTE(review): bearing is presumably measured clockwise from north —
    // the 45° example moving north-east is consistent with that; confirm.
    fn haversine_destination(&self, bearing: T, distance: T) -> Point<T>;
}
impl<T> HaversineDestination<T> for Point<T>
where
    T: Float + FromPrimitive,
{
    fn haversine_destination(&self, bearing: T, distance: T) -> Point<T> {
        // Work in radians; x is longitude, y is latitude.
        let center_lng = self.x().to_radians();
        let center_lat = self.y().to_radians();
        let bearing_rad = bearing.to_radians();

        // Angular distance on a sphere of mean Earth radius.
        let rad = distance / T::from(MEAN_EARTH_RADIUS).unwrap();

        // Spherical law of cosines / haversine forward step for the new
        // latitude, then the corresponding longitude offset via atan2.
        let lat =
            { center_lat.sin() * rad.cos() + center_lat.cos() * rad.sin() * bearing_rad.cos() }
                .asin();
        let lng = { bearing_rad.sin() * rad.sin() * center_lat.cos() }
            .atan2(rad.cos() - center_lat.sin() * lat.sin())
            + center_lng;

        Point::new(lng.to_degrees(), lat.to_degrees())
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::algorithm::haversine_distance::HaversineDistance;
    use num_traits::pow;

    // Destination and distance must be mutually consistent: travelling the
    // returned point back through haversine_distance gives the input distance.
    #[test]
    fn returns_a_new_point() {
        let p_1 = Point::<f64>::new(9.177789688110352, 48.776781529534965);
        let p_2 = p_1.haversine_destination(45., 10000.);
        assert_eq!(p_2, Point::<f64>::new(9.274410083250379, 48.84033282787534));
        let distance = p_1.haversine_distance(&p_2);
        assert_relative_eq!(distance, 10000., epsilon = 1.0e-6)
    }

    // Going 10 km at 45° should land close to going north then east along
    // the two legs of the equivalent right triangle.
    #[test]
    fn direct_and_indirect_destinations_are_close() {
        let p_1 = Point::<f64>::new(9.177789688110352, 48.776781529534965);
        let p_2 = p_1.haversine_destination(45., 10000.);
        let square_edge = { pow(10000., 2) / 2. }.sqrt();
        let p_3 = p_1.haversine_destination(0., square_edge);
        let p_4 = p_3.haversine_destination(90., square_edge);
        assert_relative_eq!(p_4.x(), p_2.x(), epsilon = 1.0e-6);
        assert_relative_eq!(p_4.y(), p_2.y(), epsilon = 1.0e-6);
    }
}
| 35.106667 | 96 | 0.612609 |
6746063d502845eb9dcf6d1ef86115640c68caa2 | 45,459 | #![allow(non_upper_case_globals)]
// Copyright (c) 2019, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
//
use crate::token_kind::TokenKind;
use ocamlrep_derive::{FromOcamlRep, ToOcamlRep};
use std::{borrow::Cow, cmp::Ordering};
// many errors are static strings, but not all of them
// Cow lets the static messages avoid allocation while still allowing
// formatted, owned messages.
pub type Error = Cow<'static, str>;
/// Classification of a syntax error: detected while parsing, or a
/// runtime-level restriction reported through the same channel.
#[derive(Debug, Clone, FromOcamlRep, ToOcamlRep, PartialEq, Eq)]
pub enum ErrorType {
    ParseError,
    RuntimeError,
}
/// A single syntax error spanning `[start_offset, end_offset)` in the source,
/// optionally chained to a related child error.
#[derive(Debug, Clone, FromOcamlRep, ToOcamlRep, PartialEq, Eq)]
pub struct SyntaxError {
    // Optional related/nested error; boxed to keep the struct size bounded.
    pub child: Option<Box<SyntaxError>>,
    pub start_offset: usize,
    pub end_offset: usize,
    pub error_type: ErrorType,
    pub message: Error,
}
impl SyntaxError {
    /// Full constructor: build an error with an optional child error and an
    /// explicit error type.
    pub fn make_with_child_and_type(
        child: Option<SyntaxError>,
        start_offset: usize,
        end_offset: usize,
        error_type: ErrorType,
        message: Error,
    ) -> Self {
        Self {
            child: child.map(Box::new),
            start_offset,
            end_offset,
            error_type,
            message,
        }
    }

    /// Convenience constructor for a plain parse error with no child.
    pub fn make(start_offset: usize, end_offset: usize, message: Error) -> Self {
        Self::make_with_child_and_type(
            None,
            start_offset,
            end_offset,
            ErrorType::ParseError,
            message,
        )
    }

    /// Orders two errors by start offset, breaking ties by end offset.
    pub fn compare_offset(e1: &Self, e2: &Self) -> Ordering {
        e1.start_offset
            .cmp(&e2.start_offset)
            .then(e1.end_offset.cmp(&e2.end_offset))
    }

    /// True when both errors cover exactly the same source span.
    pub fn equal_offset(e1: &Self, e2: &Self) -> bool {
        match Self::compare_offset(e1, e2) {
            Ordering::Equal => true,
            _ => false,
        }
    }

    /// Equality on span and message only, ignoring error type and children.
    pub fn weak_equal(e1: &Self, e2: &Self) -> bool {
        (e1.start_offset, e1.end_offset, &e1.message)
            == (e2.start_offset, e2.end_offset, &e2.message)
    }
}
// Lexical errors
// Numbered messages are stable identifiers shared with the original OCaml
// parser; keep numbers and wording in sync with it.
pub const error0001: Error = Cow::Borrowed("A hexadecimal literal needs at least one digit.");
pub const error0002: Error = Cow::Borrowed("A binary literal needs at least one digit.");
pub const error0003: Error = Cow::Borrowed(concat!(
    "A floating point literal with an exponent needs at least ",
    "one digit in the exponent."
));
pub const error0006: Error = Cow::Borrowed("This character is invalid.");
pub const error0007: Error = Cow::Borrowed("This delimited comment is not terminated.");
pub const error0008: Error = Cow::Borrowed("A name is expected here.");
pub const error0010: Error = Cow::Borrowed("A single quote is expected here.");
pub const error0011: Error = Cow::Borrowed("A newline is expected here.");
pub const error0012: Error = Cow::Borrowed("This string literal is not terminated.");
pub const error0013: Error = Cow::Borrowed("This XHP body is not terminated.");
pub const error0014: Error = Cow::Borrowed("This XHP comment is not terminated.");

// Syntactic errors
pub const error1001: Error = Cow::Borrowed("A .php file must begin with `<?hh`.");
pub const error1003: Error = Cow::Borrowed("The `function` keyword is expected here.");
pub const error1004: Error = Cow::Borrowed("A name is expected here.");
pub const error1006: Error = Cow::Borrowed("A right brace `}` is expected here.");
pub const error1007: Error = Cow::Borrowed("A type specifier is expected here.");
pub const error1008: Error = Cow::Borrowed("A variable name is expected here.");
pub const error1010: Error = Cow::Borrowed("A semicolon `;` is expected here.");
pub const error1011: Error = Cow::Borrowed("A right parenthesis `)` is expected here.");
pub const error1013: Error = Cow::Borrowed("A closing angle bracket `>` is expected here.");
pub const error1014: Error =
    Cow::Borrowed("A closing angle bracket `>` or comma is expected here.");
pub const error1015: Error = Cow::Borrowed("An expression is expected here.");
pub const error1016: Error = Cow::Borrowed("An assignment is expected here.");
pub const error1017: Error = Cow::Borrowed("An XHP attribute value is expected here.");
pub const error1018: Error = Cow::Borrowed("The `while` keyword is expected here.");
pub const error1019: Error = Cow::Borrowed("A left parenthesis `(` is expected here.");
pub const error1020: Error = Cow::Borrowed("A colon `:` is expected here.");
pub const error1021: Error = Cow::Borrowed("An opening angle bracket `<` is expected here.");
// TODO: Remove this; redundant to 1009.
pub const error1022: Error =
    Cow::Borrowed("A right parenthesis `)` or comma `,` is expected here.");
pub const error1023: Error = Cow::Borrowed("An `as` keyword is expected here.");
pub const error1025: Error = Cow::Borrowed("A shape field name is expected here.");
pub const error1026: Error = Cow::Borrowed("An opening square bracket `[` is expected here.");
pub const error1028: Error = Cow::Borrowed("An arrow `=>` is expected here.");
pub const error1029: Error = Cow::Borrowed("A closing double angle bracket `>>` is expected here.");
pub const error1031: Error =
    Cow::Borrowed("A comma `,` or a closing square bracket `]` is expected here.");
pub const error1032: Error = Cow::Borrowed("A closing square bracket `]` is expected here.");
// TODO: Break this up according to classish type
pub const error1033: Error = Cow::Borrowed(concat!(
    "A class member, method, type, trait usage, trait require, ",
    "xhp attribute, xhp use, or xhp category is expected here."
));
pub const error1034: Error = Cow::Borrowed("A left brace `{` is expected here.");
pub const error1035: Error = Cow::Borrowed("The `class` keyword is expected here.");
pub const error1036: Error = Cow::Borrowed("An equals sign `=` is expected here.");
pub const error1037: Error = Cow::Borrowed("The `record` keyword is expected here.");
pub const error1038: Error = Cow::Borrowed("A semicolon `;` or a namespace body is expected here.");
pub const error1039: Error = Cow::Borrowed("A closing XHP tag is expected here.");
pub const error1041: Error = Cow::Borrowed("A function body or a semicolon `;` is expected here.");
pub const error1044: Error = Cow::Borrowed("A name or `__construct` keyword is expected here.");
pub const error1045: Error =
    Cow::Borrowed("An `extends` or `implements` keyword is expected here.");
pub const error1046: Error = Cow::Borrowed("A lambda arrow `==>` is expected here.");
pub const error1047: Error = Cow::Borrowed("A scope resolution operator `::` is expected here.");
pub const error1048: Error = Cow::Borrowed("A name, variable name or `class` is expected here.");
pub const error1050: Error = Cow::Borrowed("A name or variable name is expected here.");
pub const error1051: Error =
    Cow::Borrowed("The `required` or `lateinit` keyword is expected here.");
pub const error1052: Error =
    Cow::Borrowed("An XHP category name beginning with a `%` is expected here.");
pub const error1053: Error = Cow::Borrowed("An XHP name or category name is expected here.");
pub const error1054: Error = Cow::Borrowed("A comma `,` is expected here.");
pub const error1055: Error = Cow::Borrowed(concat!(
    "A fallthrough directive can only appear at the end of",
    " a `switch` section."
));
// TODO(20052790): use the specific token's text in the message body.
pub const error1056: Error =
    Cow::Borrowed("This token is not valid as part of a function declaration.");
/// "Unexpected token" error carrying the offending token's text.
pub fn error1057(text: &str) -> Error {
    // TODO (kasper): T52404885: why does removing to_string() here segfaults
    Cow::Owned(format!(
        "Encountered unexpected token `{}`.",
        text.to_string()
    ))
}
/// Diagnostic for a keyword written with uppercase letters.
pub fn uppercase_kw(text: &str) -> Error {
    // Format the `&str` directly; no intermediate `String` is needed.
    Cow::Owned(format!("Keyword `{}` must be written in lowercase", text))
}
/// Diagnostic for an unexpected token, suggesting the token that was
/// probably intended.
pub fn error1058(received: &str, required: &str) -> Error {
    // Both arguments are `&str` and can be formatted without copying.
    Cow::Owned(format!(
        "Encountered unexpected token `{}`. Did you mean `{}`?",
        received, required
    ))
}
/// Diagnostic for a missing terminator token in alternate block syntax.
pub fn error1059(terminator: TokenKind) -> Error {
    // `terminator.to_string()` already yields a displayable value; the
    // second `.to_string()` in the original merely cloned it.
    Cow::Owned(format!(
        "An `{}` is required when using alternate block syntax.",
        terminator.to_string(),
    ))
}
/// Diagnostic for leading markup / `<?hh` in a file whose mode is implied by
/// its extension.
pub fn error1060(extension: &str) -> Error {
    // `.hack` files are implicitly strict; every other extension reaching
    // this diagnostic is implicitly partial.
    let kind = if extension == "hack" {
        "strict"
    } else {
        "partial"
    };
    Cow::Owned(format!(
        "Leading markup and `<?hh` are not permitted in `.{}` files, which are always `{}`.",
        extension, kind
    ))
}
pub const error1063: Error = Cow::Borrowed("Expected matching separator here.");
pub const error1064: Error = Cow::Borrowed("XHP children declarations are no longer supported.");
pub const error1065: Error = Cow::Borrowed("A backtick ``` is expected here.");
pub const error2001: Error = Cow::Borrowed("A type annotation is required in `strict` mode.");
pub const error2003: Error =
Cow::Borrowed("A `case` statement may only appear directly inside a `switch`.");
pub const error2004: Error =
Cow::Borrowed("A `default` statement may only appear directly inside a `switch`.");
pub const error2005: Error =
Cow::Borrowed("A `break` statement may only appear inside a `switch` or loop.");
pub const error2006: Error = Cow::Borrowed("A `continue` statement may only appear inside a loop.");
pub const error2007: Error =
Cow::Borrowed("A `try` statement requires a `catch` or a `finally` clause.");
pub const error2008: Error = Cow::Borrowed(concat!(
"The first statement inside a `switch` statement must ",
"be a `case` or `default` label statement."
));
/// Diagnostic for a `static` modifier on a constructor.
pub fn error2009(class_name: &str, method_name: &str) -> Error {
    // `&str` arguments are formatted directly; the previous `.to_string()`
    // calls allocated throwaway `String`s.
    Cow::Owned(format!(
        "Constructor `{}::{}()` cannot be static",
        class_name, method_name,
    ))
}
pub const error2010: Error = Cow::Borrowed(concat!(
"Parameters cannot have visibility modifiers (except in ",
"parameter lists of constructors)."
));
pub const error2014: Error = Cow::Borrowed("An abstract method cannot have a method body.");
pub fn error2015(class_name: &str, method_name: &str) -> Error {
Cow::Owned(format!(
"Non-abstract method `{}::{}` must contain body",
class_name.to_string(),
method_name.to_string(),
))
}
pub fn error2016(class_name: &str, method_name: &str) -> Error {
Cow::Owned(format!(
"Cannot declare abstract method `{}::{}` `private`",
class_name.to_string(),
method_name.to_string(),
))
}
pub const error2018: Error =
Cow::Borrowed("A constructor cannot have a non-`void` type annotation.");
pub fn error2019(class_name: &str, method_name: &str) -> Error {
Cow::Owned(format!(
"Cannot declare abstract method `{}::{}` `final`",
class_name.to_string(),
method_name.to_string(),
))
}
pub const error2020: Error = Cow::Borrowed(concat!(
"Use of the `{}` subscript operator is deprecated; ",
" use `[]` instead."
));
pub const error2021: Error = Cow::Borrowed(concat!(
"A variadic parameter `...` may only appear at the end of ",
"a parameter list."
));
pub const error2023: Error =
Cow::Borrowed("Abstract constructors cannot have parameters with visibility modifiers");
pub const error2024: Error =
Cow::Borrowed("Traits or interfaces cannot have parameters with visibility modifiers");
pub const error2022: Error =
Cow::Borrowed("A variadic parameter `...` may not be followed by a comma.");
pub fn error2025(class_name: &str, prop_name: &str) -> Error {
Cow::Owned(format!(
"Cannot redeclare `{}::{}`",
class_name.to_string(),
prop_name.to_string(),
))
}
pub const error2029: Error = Cow::Borrowed("Only traits and interfaces may use `require extends`.");
pub const error2030: Error = Cow::Borrowed("Only traits may use `require implements`.");
pub const error2032: Error = Cow::Borrowed("The array type is not allowed in `strict` mode.");
pub const error2033: Error = Cow::Borrowed(concat!(
"The splat operator `...` for unpacking variadic arguments ",
"may only appear at the **end** of an argument list."
));
pub const error2034: Error = Cow::Borrowed(concat!(
"A type alias declaration cannot both use `type` and have a ",
"constraint. Did you mean `newtype`?"
));
pub const error2035: Error = Cow::Borrowed("Only classes may implement interfaces.");
pub const error2036: Error = Cow::Borrowed(concat!(
"Only interfaces and classes may extend other interfaces and ",
"classes."
));
pub const error2037: Error = Cow::Borrowed("A class may extend at most **one** other class.");
/// Diagnostic for `new Foo` without an argument list.
pub fn error2038(constructor_name: &str) -> Error {
    // Format the `&str` directly instead of allocating via `.to_string()`.
    Cow::Owned(format!(
        concat!(
            "A constructor initializing an object must be passed a (possibly empty) ",
            "list of arguments. Did you mean `new {}()`?",
        ),
        constructor_name,
    ))
}
pub const error2040: Error = Cow::Borrowed(concat!(
"Invalid use of `list(...)`. A list expression may only be ",
"used as the left side of a simple assignment, the value clause of a ",
"`foreach` loop, or a list item nested inside another list expression."
));
pub const error2041: Error = Cow::Borrowed(concat!(
"Unexpected method body: interfaces may contain only",
" method signatures, and **not** method implementations."
));
pub const error2042: Error = Cow::Borrowed("Only classes may be declared `abstract`.");
pub fn error2046(method_type: &str) -> Error {
Cow::Owned(format!(
"`async` cannot be used on {}. Use an `Awaitable<...>` return type instead.",
method_type.to_string(),
))
}
pub const error2048: Error = Cow::Borrowed("Expected group `use` prefix to end with `\\`");
pub const error2049: Error =
Cow::Borrowed("A namespace `use` clause may not specify the kind here.");
pub const error2050: Error =
Cow::Borrowed("A concrete constant declaration must have an initializer.");
pub const error2051: Error =
Cow::Borrowed("An abstract constant declaration must not have an initializer.");
pub const error2052: Error = Cow::Borrowed(concat!(
"Cannot mix bracketed namespace declarations with ",
"unbracketed namespace declarations"
));
pub const error2053: Error = Cow::Borrowed(concat!(
"Use of `var` as synonym for `public` in declaration disallowed in Hack. ",
"Use `public` instead."
));
pub const error2054: Error = Cow::Borrowed(concat!(
"Method declarations require a visibility modifier ",
"such as `public`, `private` or `protected`."
));
pub const error2055: Error = Cow::Borrowed("At least one enumerated item is expected.");
pub const error2056: Error = Cow::Borrowed("First unbracketed namespace occurrence here");
pub const error2057: Error = Cow::Borrowed("First bracketed namespace occurrence here");
pub const invalid_shape_field_name: Error =
Cow::Borrowed("Shape field name must be a nonempty single-quoted string or a class constant");
pub const shape_field_int_like_string: Error =
Cow::Borrowed("Shape field name must not be an int-like string (i.e. \"123\")");
pub const error2061: Error = Cow::Borrowed(concat!(
"Non-static instance variables are not allowed in abstract ",
"final classes."
));
pub const error2062: Error =
Cow::Borrowed("Non-static methods are not allowed in `abstract final` classes.");
pub const error2063: Error = Cow::Borrowed("Expected integer or string literal.");
pub const error2065: Error =
Cow::Borrowed("A variadic parameter `...` must not have a default value.");
// This was typing error 4077.
pub const error2066: Error = Cow::Borrowed(concat!(
"A previous parameter has a default value. Remove all the ",
"default values for the preceding parameters, or add a default value to ",
"this one."
));
pub const error2068: Error = Cow::Borrowed("`hh` blocks and `php` blocks cannot be mixed.");
pub const invalid_octal_integer: Error = Cow::Borrowed("Invalid octal integers");
pub const prefixed_invalid_string_kind: Error =
Cow::Borrowed("Only double-quoted strings may be prefixed.");
pub const illegal_interpolated_brace_with_embedded_dollar_expression: Error =
Cow::Borrowed(concat!(
"The only legal expressions inside a `{$...}`-expression embedded in a string are ",
"variables, function calls, subscript expressions, and member access expressions"
));
pub const expected_dotdotdot: Error = Cow::Borrowed("`...` is expected here.");
pub const invalid_foreach_element: Error = Cow::Borrowed(
"An arrow `=>` or right parenthesis `)` \
is expected here.",
);
pub const inline_function_def: Error =
Cow::Borrowed("Inline function definitions are not supported in Hack");
pub const decl_outside_global_scope: Error =
Cow::Borrowed("Declarations are not supported outside global scope");
pub const type_keyword: Error = Cow::Borrowed("The `type` keyword is expected here.");
pub const expected_simple_offset_expression: Error =
Cow::Borrowed("A simple offset expression is expected here");
pub const expected_user_attribute: Error = Cow::Borrowed("A user attribute is expected here.");
pub const expected_as_or_insteadof: Error =
Cow::Borrowed("The `as` keyword or the `insteadof` keyword is expected here.");
pub const missing_double_quote: Error = /* error0010 analogue */
Cow::Borrowed("A double quote is expected here.");
pub const instanceof_disabled: Error = Cow::Borrowed(
"The `instanceof` operator is not supported in Hack; use the `is` operator or `is_a()`",
);
pub const abstract_instance_property: Error =
Cow::Borrowed("Instance property may not be abstract.");
pub const memoize_lsb_on_non_static: Error =
Cow::Borrowed("`<<__MemoizeLSB>>` can only be applied to static methods");
pub const memoize_lsb_on_non_method: Error =
Cow::Borrowed("`<<__MemoizeLSB>>` can only be applied to methods");
pub const expression_as_attribute_arguments: Error =
Cow::Borrowed("Attribute arguments must be literals");
pub const instanceof_invalid_scope_resolution: Error = Cow::Borrowed(concat!(
"A scope resolution `::` on the right side of an ",
"`instanceof` operator must start with a class name, `self`, `parent`, or `static`, and end with ",
"a variable",
));
pub const instanceof_memberselection_inside_scoperesolution: Error = Cow::Borrowed(concat!(
"A scope resolution `::` on the right ",
"side of an instanceof operator cannot contain a member selection `->`",
));
pub const instanceof_missing_subscript_index: Error = Cow::Borrowed(concat!(
"A subscript expression `[]` on the right side of an ",
"instanceof operator must have an index",
));
pub fn instanceof_new_unknown_node(msg: &str) -> Error {
Cow::Owned(format!(
"Unexpected node on right hand side of `new` or `instanceof`: `{}`",
msg.to_string(),
))
}
pub const invalid_await_use: Error = Cow::Borrowed("`await` cannot be used as an expression");
pub const toplevel_await_use: Error =
Cow::Borrowed("`await` cannot be used in a toplevel statement");
pub const invalid_constructor_method_call: Error = Cow::Borrowed(
"Method call following immediate constructor call requires parentheses around constructor call.",
);
pub const invalid_scope_resolution_qualifier: Error =
Cow::Borrowed("Only classnames and variables are allowed before `::`.");
pub const invalid_variable_name: Error = Cow::Borrowed(
"A valid variable name starts with a letter or underscore, followed by any number of letters, numbers, or underscores",
);
pub const invalid_yield: Error =
Cow::Borrowed("`yield` can only appear as a statement or on the right of an assignment");
pub const invalid_class_in_collection_initializer: Error =
Cow::Borrowed("Cannot use collection initialization for non-collection class.");
pub const invalid_brace_kind_in_collection_initializer: Error = Cow::Borrowed(
"Initializers of `vec`, `dict` and `keyset` should use `[...]` instead of `{...}`.",
);
pub fn invalid_value_initializer(name: &str) -> Error {
Cow::Owned(format!(
"Cannot use value initializer for `{}`. It requires `key => value`.",
name.to_string(),
))
}
pub fn invalid_key_value_initializer(name: &str) -> Error {
Cow::Owned(format!(
"Cannot use key value initializer for `{}`. It does not allow keys.",
name.to_string(),
))
}
pub const nested_ternary: Error = Cow::Borrowed(
"Nested ternary expressions inside ternary expressions are ambiguous. Please add parentheses",
);
pub const alternate_control_flow: Error =
Cow::Borrowed("Alternate control flow syntax is not allowed in Hack files");
pub const execution_operator: Error =
Cow::Borrowed("The execution operator is not allowed in Hack files");
pub const non_re_prefix: Error = Cow::Borrowed("Only `re`-prefixed strings allowed.");
pub const collection_intrinsic_generic: Error =
Cow::Borrowed("Cannot initialize collection builtins with type parameters");
pub const collection_intrinsic_many_typeargs: Error =
Cow::Borrowed("Collection expression must have less than three type arguments");
pub const invalid_hack_mode: Error =
Cow::Borrowed("Incorrect comment; possible values include `strict`, `partial`, or empty");
pub const pair_initializer_needed: Error = Cow::Borrowed("Initializer needed for Pair object");
pub const pair_initializer_arity: Error =
Cow::Borrowed("Pair objects must have exactly 2 elements");
pub const toplevel_statements: Error =
Cow::Borrowed("Toplevel statements are not allowed. Use `__EntryPoint` attribute instead");
pub const invalid_reified: Error =
Cow::Borrowed("`reify` keyword can only appear at function or class type parameter position");
pub fn reified_in_invalid_classish(s: &str) -> Error {
Cow::Owned(format!(
"Invalid to use a reified type within {}'s type parameters",
s.to_string(),
))
}
pub const shadowing_reified: Error = Cow::Borrowed("You may not shadow a reified parameter");
pub const static_property_in_reified_class: Error =
Cow::Borrowed("You may not use static properties in a class with reified type parameters");
pub const cls_reified_generic_in_static_method: Error =
Cow::Borrowed("You may not use reified generics of the class in a static method");
pub const static_method_reified_obj_creation: Error = Cow::Borrowed(
"You may not use object creation for potentially reified `self` or `parent` from a static method",
);
pub const non_invariant_reified_generic: Error =
Cow::Borrowed("Reified generics cannot be covariant or contravariant");
pub const no_generics_on_constructors: Error = Cow::Borrowed(
"Generic type parameters are not allowed on constructors. Consider adding a type parameter to the class",
);
pub const no_type_parameters_on_dynamic_method_calls: Error =
Cow::Borrowed("Generics type parameters are disallowed on dynamic method calls");
pub const dollar_unary: Error =
Cow::Borrowed("The dollar sign `$` cannot be used as a unary operator");
pub const type_alias_to_type_constant: Error =
Cow::Borrowed("Type aliases to type constants are not supported");
pub const interface_with_memoize: Error =
Cow::Borrowed("`__Memoize` is not allowed on interface methods");
pub const multiple_reactivity_annotations: Error = Cow::Borrowed(concat!(
"Only one of following annotations is allowed: `__Pure`, `__Rx`, ",
"`__RxShallow`, `__RxLocal`, `__NonRx`, `__Cipp`, `__CippLocal`, `__CippGlobal`.",
));
pub const functions_cannot_implement_reactive: Error =
Cow::Borrowed("`__OnlyRxIfImpl` annotations are only valid on class methods.");
pub const missing_reactivity_for_condition: Error = Cow::Borrowed(concat!(
"`__OnlyRxIfImpl` and `__AtMostRxAsArgs` annotations cannot ",
"be used without `__Pure`, `__Rx`, `__RxShallow`, or `__RxLocal`.",
));
pub const conflicting_mutable_and_owned_mutable_attributes: Error =
Cow::Borrowed("Parameter cannot have both `__Mutable` and `__OwnedMutable` annotations.");
pub const conflicting_mutable_and_maybe_mutable_attributes: Error =
Cow::Borrowed("Parameter cannot have both `__Mutable` and `__MaybeMutable` annotations.");
pub const conflicting_owned_mutable_and_maybe_mutable_attributes: Error =
Cow::Borrowed("Parameter cannot have both `__OwnedMutable` and `__MaybeMutable` annotations.");
pub const mutably_owned_attribute_on_non_rx_function: Error =
Cow::Borrowed("`__OwnedMutable` annotated parameters are only allowed in reactive functions.");
pub const invalid_non_rx_argument_for_lambda: Error = Cow::Borrowed(concat!(
"Invalid argument list for `__NonRx` attribute that is placed on anonymous function. ",
"Argument list for `__NonRx` attribute that is used in this position should be empty.",
));
pub const invalid_non_rx_argument_for_declaration: Error = Cow::Borrowed(concat!(
"Invalid argument list for `__NonRx` attribute that is placed on a declaration of function or method. ",
"Argument list for `__NonRx` attribute that is used in this position should contain only one string literal value.",
));
pub const nested_concurrent_blocks: Error = Cow::Borrowed("`concurrent` blocks cannot be nested.");
pub const fewer_than_two_statements_in_concurrent_block: Error = Cow::Borrowed(concat!(
"Expected 2 or more statements in concurrent block. `concurrent` wrapping ",
"nothing or a single statement is not useful or already implied.",
));
pub const invalid_syntax_concurrent_block: Error = Cow::Borrowed(concat!(
"`concurrent` block must contain a compound statement of two or ",
"more expression statements, IE concurrent `{ <expr>; <expr>; }`.",
));
pub const statement_without_await_in_concurrent_block: Error =
Cow::Borrowed("Statement without an `await` in a concurrent block");
pub const concurrent_is_disabled: Error = Cow::Borrowed("`concurrent` is disabled");
pub const static_closures_are_disabled: Error =
Cow::Borrowed("Static closures are not supported in Hack");
pub const invalid_await_position: Error = Cow::Borrowed(concat!(
"`await` cannot be used as an expression in this ",
"location because it's conditionally executed.",
));
pub const invalid_await_position_dependent: Error = Cow::Borrowed(concat!(
"`await` cannot be used as an expression inside another await expression. ",
"Pull the inner `await` out into its own statement.",
));
pub const mutability_annotation_on_constructor: Error = Cow::Borrowed(
"`__Mutable`, `__MaybeMutable`, and `__MutableReturn` annotations are not allowed on constructors.",
);
pub const mutability_annotation_on_static_method: Error = Cow::Borrowed(
"`__Mutable` and `__MaybeMutable` annotations are not allowed on static methods.",
);
pub const mutability_annotation_on_inout_parameter: Error = Cow::Borrowed(
"`__Mutable`, `__MaybeMutable` and `__OwnedMutable` annotations are not allowed on inout parameters.",
);
/// Diagnostic for a mutable `$this` (`is_this == true`) or mutable
/// parameters (`is_this == false`) on a memoized function.
pub fn mutable_parameter_in_memoize_function(is_this: bool) -> Error {
    // The selected suffix is a `&'static str`; it can be formatted directly
    // without the `String` the original `.to_string()` call allocated.
    let what = if is_this { "`$this`." } else { "parameters." };
    Cow::Owned(format!("Memoized functions cannot have mutable {}", what))
}
pub const mutable_return_in_memoize_function: Error =
Cow::Borrowed("Memoized functions cannot return mutable objects.");
pub const tparams_in_tconst: Error =
Cow::Borrowed("Type parameters are not allowed on class type constants");
pub const targs_not_allowed: Error =
Cow::Borrowed("Type arguments are not allowed in this position");
pub const reified_attribute: Error = Cow::Borrowed(
"`__Reified` and `__HasReifiedParent` attributes may not be provided by the user",
);
pub const lval_as_expression: Error = Cow::Borrowed(
"Assignments can no longer be used as expressions. Pull the assignment out into a separate statement.",
);
pub fn elt_abstract_private(elt: &str) -> Error {
Cow::Owned(format!(
"Cannot declare abstract {} `private`.",
elt.to_string(),
))
}
pub const only_soft_allowed: Error = Cow::Borrowed("Only the `__Soft` attribute is allowed here.");
pub const soft_no_arguments: Error =
Cow::Borrowed("The `__Soft` attribute does not take arguments.");
pub const no_legacy_soft_typehints: Error = Cow::Borrowed(
"The `@` syntax for soft typehints is not allowed. Use the `__Soft` attribute instead.",
);
pub const outside_dollar_str_interp: Error =
Cow::Borrowed("The `${x}` syntax is disallowed in Hack. Use `{$x}` instead.");
pub const no_const_interfaces_traits_enums: Error =
Cow::Borrowed("Interfaces, traits and enums may not be declared `__Const`");
pub const no_const_late_init_props: Error =
Cow::Borrowed("`__Const` properties may not also be `__LateInit`");
pub const no_const_static_props: Error = Cow::Borrowed("Static properties may not be `__Const`");
pub const no_const_abstract_final_class: Error =
Cow::Borrowed("Cannot apply `__Const` attribute to an abstract final class");
pub const no_legacy_attribute_syntax: Error = Cow::Borrowed(
"The `<<...>>` syntax for user attributes is not allowed. Use the `@` syntax instead.",
);
pub const no_silence: Error = Cow::Borrowed("The error suppression operator `@` is not allowed");
pub const const_mutation: Error = Cow::Borrowed("Cannot mutate a class constant");
pub const no_attributes_on_variadic_parameter: Error =
Cow::Borrowed("Attributes on variadic parameters are not allowed");
pub const invalid_constant_initializer: Error =
Cow::Borrowed("Invalid expression in constant initializer");
pub const parent_static_prop_decl: Error =
Cow::Borrowed("Cannot use `static` or `parent::class` in property declaration");
pub fn error2070(open_tag: &str, close_tag: &str) -> Error {
Cow::Owned(format!(
"XHP: mismatched tag: `{}` not the same as `{}`",
close_tag.to_string(),
open_tag.to_string(),
))
}
/// Diagnostic for a decimal literal too large to represent.
pub fn error2071(s: &str) -> Error {
    // `s` is a `&str`; formatting it directly avoids the extra allocation
    // the previous `s.to_string()` performed.
    Cow::Owned(format!("Decimal number is too big: `{}`", s))
}
/// Diagnostic for a hexadecimal literal too large to represent.
pub fn error2072(s: &str) -> Error {
    Cow::Owned(format!("Hexadecimal number is too big: `{}`", s))
}
pub const error2073: Error = Cow::Borrowed(concat!(
"A variadic parameter `...` cannot have a modifier ",
"that changes the calling convention, like `inout`.",
));
pub fn error2074(call_modifier: &str) -> Error {
Cow::Owned(format!(
"An `{}` parameter must not have a default value.",
call_modifier.to_string(),
))
}
pub const error2077: Error = Cow::Borrowed("Cannot use empty list");
pub fn not_allowed_in_write(what: &str) -> Error {
Cow::Owned(format!(
"{} is not allowed in write context",
what.to_string(),
))
}
pub const reassign_this: Error = Cow::Borrowed("Cannot re-assign `$this`");
pub const enum_elem_name_is_class: Error = Cow::Borrowed("Enum element cannot be named `class`");
pub const sealed_enum: Error = Cow::Borrowed("Enums cannot be sealed.");
pub const property_requires_visibility: Error = Cow::Borrowed(concat!(
"Property declarations require a visibility modifier ",
"such as `public`, `private` or `protected`.",
));
pub const abstract_prop_init: Error =
Cow::Borrowed("An `abstract` property must not have an initializer.");
pub const const_static_prop_init: Error =
Cow::Borrowed("A `const static` property must have an initializer.");
pub fn namespace_name_is_already_in_use(name: &str, short_name: &str) -> Error {
Cow::Owned(format!(
"Cannot use namespace `{}` as `{}` because the name is already in use",
name.to_string(),
short_name.to_string()
))
}
pub const strict_namespace_hh: Error = Cow::Borrowed(concat!(
"To use strict Hack, place `// strict` after the open tag. ",
"If it's already there, remove this line. ",
"Hack is strict already.",
));
/// Diagnostic for a `use` alias that clashes with a name explicitly bound by
/// an earlier `use` statement; `line_num` is the line of that earlier use.
pub fn name_is_already_in_use_hh(line_num: isize, name: &str, short_name: &str) -> Error {
    // `isize` and `&str` both implement `Display`; the original routed each
    // through `.to_string()`, allocating three throwaway `String`s.
    Cow::Owned(format!(
        "Cannot use `{}` as `{}` because the name was explicitly used earlier via a `use` statement on line {}",
        name, short_name, line_num,
    ))
}
pub fn name_is_already_in_use_implicit_hh(line_num: isize, name: &str, short_name: &str) -> Error {
Cow::Owned(format!(
concat!(
"Cannot use `{}` as `{}` because the name was implicitly used on line {}",
"; implicit use of names from the HH namespace can be suppressed by adding an explicit",
" `use` statement earlier in the current namespace block",
),
name.to_string(),
short_name.to_string(),
line_num.to_string(),
))
}
pub fn name_is_already_in_use_php(name: &str, short_name: &str) -> Error {
Cow::Owned(format!(
"Cannot use `{}` as `{}` because the name is already in use",
name.to_string(),
short_name.to_string(),
))
}
pub const original_definition: Error = Cow::Borrowed("Original definition");
pub fn function_name_is_already_in_use(name: &str, short_name: &str) -> Error {
Cow::Owned(format!(
"Cannot use function `{}` as `{}` because the name is already in use",
name.to_string(),
short_name.to_string(),
))
}
pub fn const_name_is_already_in_use(name: &str, short_name: &str) -> Error {
Cow::Owned(format!(
"Cannot use const `{}` as `{}` because the name is already in use",
name.to_string(),
short_name.to_string(),
))
}
pub fn type_name_is_already_in_use(name: &str, short_name: &str) -> Error {
Cow::Owned(format!(
"Cannot use type `{}` as `{}` because the name is already in use",
name.to_string(),
short_name.to_string(),
))
}
pub const namespace_decl_first_statement: Error = Cow::Borrowed(
"Namespace declaration statement has to be the very first statement in the script",
);
pub const code_outside_namespace: Error =
Cow::Borrowed("No code may exist outside of namespace {}");
pub const global_in_const_decl: Error =
Cow::Borrowed("Cannot have globals in constant declaration");
pub const parent_static_const_decl: Error =
Cow::Borrowed("Cannot use `static` or `parent::class` in constant declaration");
pub const no_async_before_lambda_body: Error =
Cow::Borrowed("Don't use `() ==> async { ... }`. Instead, use: `async () ==> { ... }`");
pub fn invalid_number_of_args(name: &str, n: usize) -> Error {
Cow::Owned(format!(
"Method `{}` must take exactly {} arguments",
name.to_string(),
n.to_string(),
))
}
pub fn invalid_inout_args(name: &str) -> Error {
Cow::Owned(format!(
"Method `{}` cannot take inout arguments",
name.to_string(),
))
}
pub fn redeclaration_error(name: &str) -> Error {
Cow::Owned(format!("Cannot redeclare `{}`", name.to_string(),))
}
/// Diagnostic for declaring a name that was already implicitly used from the
/// HH namespace on `line_num`. `_short_name` is unused but retained so the
/// signature stays parallel with `declared_name_is_already_in_use`.
pub fn declared_name_is_already_in_use_implicit_hh(
    line_num: usize,
    name: &str,
    _short_name: &str,
) -> Error {
    // `usize` and `&str` implement `Display`; no `.to_string()` needed.
    Cow::Owned(format!(
        concat!(
            "Cannot declare `{}` because the name was implicitly used on line {}; ",
            "implicit use of names from the HH namespace can be suppressed by adding an explicit ",
            "`use` statement earlier in the current namespace block",
        ),
        name, line_num,
    ))
}
pub fn declared_name_is_already_in_use(line_num: usize, name: &str, _short_name: &str) -> Error {
Cow::Owned(format!(
concat!(
"Cannot declare `{}` because the name was explicitly used earlier via a `use` ",
"statement on line {}",
),
name.to_string(),
line_num.to_string(),
))
}
pub const const_in_trait: Error = Cow::Borrowed("Traits cannot have constants");
pub const sealed_val_not_classname: Error =
Cow::Borrowed("Values in sealed whitelist must be classname constants.");
pub const sealed_qualifier_invalid: Error =
Cow::Borrowed("`__Sealed` can only be used with named types, e.g. `Foo::class`");
pub const list_must_be_lvar: Error =
Cow::Borrowed("`list()` can only be used as an lvar. Did you mean to use `tuple()`?");
pub const async_not_last: Error =
Cow::Borrowed("The `async` modifier must be directly before the `function` keyword.");
pub const using_st_function_scoped_top_level: Error = Cow::Borrowed(concat!(
"Using statement in function scoped form may only be used at the top ",
"level of a function or a method",
));
pub const double_variadic: Error = Cow::Borrowed("Parameter redundantly marked as variadic `...`.");
pub fn conflicting_trait_require_clauses(name: &str) -> Error {
Cow::Owned(format!(
"Conflicting requirements for `{}`",
name.to_string(),
))
}
pub const shape_type_ellipsis_without_trailing_comma: Error =
Cow::Borrowed("A comma is required before the `...` in a shape type");
pub const yield_in_magic_methods: Error =
Cow::Borrowed("`yield` is not allowed in constructors or magic methods");
pub const yield_outside_function: Error =
Cow::Borrowed("`yield` can only be used inside a function");
pub const coloncolonclass_on_dynamic: Error =
Cow::Borrowed("Dynamic class names are not allowed in compile-time `::class` fetch");
pub const this_in_static: Error =
Cow::Borrowed("Don't use `$this` in a static method, use `static::` instead");
pub fn async_magic_method(name: &str) -> Error {
Cow::Owned(format!(
"Cannot declare constructors and magic methods like `{}` as `async`",
name.to_string(),
))
}
pub fn unsupported_magic_method(name: &str) -> Error {
Cow::Owned(format!(
"Magic `{}` methods are no longer supported",
name.to_string(),
))
}
pub fn reserved_keyword_as_class_name(class_name: &str) -> Error {
Cow::Owned(format!(
"Cannot use `{}` as class name as it is reserved",
class_name.to_string(),
))
}
pub const xhp_class_multiple_category_decls: Error =
Cow::Borrowed("An XHP class cannot have multiple category declarations");
pub const xhp_class_multiple_children_decls: Error =
Cow::Borrowed("An XHP class cannot have multiple children declarations");
pub const inout_param_in_generator: Error =
Cow::Borrowed("Parameters may not be marked `inout` on generators");
pub const inout_param_in_async_generator: Error =
Cow::Borrowed("Parameters may not be marked `inout` on `async` generators");
pub const inout_param_in_async: Error =
Cow::Borrowed("Parameters may not be marked `inout` on `async` functions");
pub const inout_param_in_construct: Error =
Cow::Borrowed("Parameters may not be marked `inout` on constructors");
pub const fun_arg_inout_set: Error =
Cow::Borrowed("You cannot set an `inout` decorated argument while calling a function");
pub const fun_arg_inout_const: Error = Cow::Borrowed("You cannot decorate a constant as `inout`");
pub const fun_arg_invalid_arg: Error =
Cow::Borrowed("You cannot decorate this argument as `inout`");
pub const fun_arg_inout_containers: Error = Cow::Borrowed(concat!(
"Parameters marked `inout` must be contained in locals, vecs, dicts, keysets,",
" and arrays",
));
pub const memoize_with_inout: Error =
Cow::Borrowed("`<<__Memoize>>` cannot be used on functions with `inout` parameters");
pub const method_calls_on_xhp_attributes: Error =
Cow::Borrowed("Method calls are not allowed on XHP attributes");
pub const method_calls_on_xhp_expression: Error =
Cow::Borrowed("Please add parentheses around the XHP component");
pub fn class_with_abstract_method(name: &str) -> Error {
Cow::Owned(format!(
concat!(
"Class `{}` contains an abstract method and must ",
"therefore be declared `abstract`",
),
name.to_string(),
))
}
pub const interface_has_private_method: Error =
Cow::Borrowed("Interface methods must be `public` or `protected`");
pub fn redeclaration_of_function(name: &str, loc: &str) -> Error {
Cow::Owned(format!(
"Cannot redeclare `{}()` (previously declared in {})",
name.to_string(),
loc.to_string()
))
}
pub fn redeclaration_of_method(name: &str) -> Error {
Cow::Owned(format!("Redeclared method `{}`", name.to_string(),))
}
pub fn self_or_parent_colon_colon_class_outside_of_class(name: &str) -> Error {
Cow::Owned(format!(
"Cannot access `{}::class` when no class scope is active",
name.to_string(),
))
}
pub fn invalid_is_as_expression_hint(n: &str, hint: &str) -> Error {
Cow::Owned(format!(
"`{}` typehints cannot be used with `{}` expressions",
hint.to_string(),
n.to_string(),
))
}
pub const elvis_operator_space: Error = Cow::Borrowed("An Elvis operator `?:` is expected here.");
pub fn clone_takes_no_arguments(class_name: &str, method_name: &str) -> Error {
Cow::Owned(format!(
"Method `{}::{}` cannot accept any arguments",
class_name.to_string(),
method_name.to_string(),
))
}
pub fn clone_cannot_be_static(class_name: &str, method_name: &str) -> Error {
Cow::Owned(format!(
"Clone method `{}::{}()` cannot be static",
class_name.to_string(),
method_name.to_string(),
))
}
pub const namespace_not_a_classname: Error =
Cow::Borrowed("Namespace cannot be used as a classname");
pub const for_with_as_expression: Error =
Cow::Borrowed("For loops can not use `as` expressions. Did you mean `foreach`?");
pub const sealed_final: Error = Cow::Borrowed("Classes cannot be both `final` and `sealed`.");
pub const interface_implements: Error =
Cow::Borrowed("Interfaces may not implement other interfaces or classes");
pub const memoize_on_lambda: Error =
Cow::Borrowed("`<<__Memoize>>` attribute is not allowed on lambdas or anonymous functions.");
/// Diagnostic for a `final` modifier on a construct that cannot take one;
/// `elt` names the construct (e.g. a classish kind).
pub fn declared_final(elt: &str) -> Error {
    // Format the `&str` directly; the previous `.to_string()` allocated.
    Cow::Owned(format!("{} cannot be declared `final`.", elt))
}
/// Diagnostic for declaring a non-class classish construct as an XHP class.
pub fn invalid_xhp_classish(elt: &str) -> Error {
    Cow::Owned(format!("{} are not valid xhp classes.", elt))
}
pub const empty_method_name: Error = Cow::Borrowed("Expected a method name");
pub fn lowering_parsing_error(text: &str, syntax: &str) -> Error {
Cow::Owned(format!(
"Encountered unexpected text `{}`, was expecting a {}.",
text.to_string(),
syntax.to_string(),
))
}
pub const xhp_class_attribute_type_constant: Error =
Cow::Borrowed("Type constants are not allowed on xhp class attributes");
pub const globals_disallowed: Error =
Cow::Borrowed("`$GLOBALS` variable is removed from the language. Use HH\\global functions");
pub const invalid_this: Error =
Cow::Borrowed("`$this` cannot be used in functions and static methods");
pub const cannot_unset_this: Error = Cow::Borrowed("`$this` cannot be unset");
pub const invalid_await_position_pipe: Error =
Cow::Borrowed("`await` cannot be used as an expression right of a pipe operator.");
/// Diagnostic for a modifier that is not applicable to a declaration kind.
pub fn invalid_modifier_for_declaration(decl: &str, modifier: &str) -> Error {
    // All arguments are `&str`; format them directly instead of allocating
    // intermediate `String`s via `.to_string()`.
    Cow::Owned(format!("{} cannot be declared `{}`", decl, modifier))
}
/// Diagnostic for the same modifier appearing more than once on one
/// declaration.
pub fn duplicate_modifiers_for_declaration(decl: &str) -> Error {
    Cow::Owned(format!("{} cannot have duplicate modifiers", decl))
}
/// Diagnostic for more than one visibility modifier on one declaration.
pub fn multiple_visibility_modifiers_for_declaration(decl: &str) -> Error {
    Cow::Owned(format!("{} cannot have multiple visibility modifiers", decl))
}
pub const break_continue_n_not_supported: Error =
Cow::Borrowed("`break`/`continue N` operators are not supported.");
pub fn invalid_typehint_alias(alias: &str, hint: &str) -> Error {
Cow::Owned(format!(
"Invalid type hint `{}`. Use `{}` instead",
alias.to_string(),
hint.to_string(),
))
}
pub const function_pointer_bad_recv: Error = Cow::Borrowed(concat!(
"Function pointers `<>` can only be created with toplevel functions and explicitly named static methods. ",
"Use lambdas `(...) ==> {...}` for other cases."
));
pub const local_variable_with_type: Error =
Cow::Borrowed("Local variables cannot have type annotations in Hack.");
pub const empty_expression_illegal: Error =
Cow::Borrowed("The `empty()` expression has been removed from Hack.");
pub const empty_switch_cases: Error =
Cow::Borrowed("`switch` statements need to have at least one `case` or a `default` block");
pub const preceding_backslash: Error = Cow::Borrowed("Unnecessary preceding backslash");
/// Diagnostic for a second `__EntryPoint` annotation in one file; `loc`
/// describes where the previous annotation appeared.
pub fn multiple_entrypoints(loc: &str) -> Error {
    // `loc` is a `&str`; no `.to_string()` allocation is needed to format it.
    Cow::Owned(format!(
        "Only one `__EntryPoint` annotation is permitted per file (previous `__EntryPoint` annotation in {})",
        loc
    ))
}
/// Diagnostic for using an unstable feature that has not been enabled.
pub fn cannot_use_feature(feature: &str) -> Error {
    Cow::Owned(format!("Cannot use unstable feature: `{}`", feature))
}
/// Diagnostic for a misuse of `__EnableUnstableFeatures`; `message` explains
/// why this particular use is invalid.
pub fn invalid_use_of_enable_unstable_feature(message: &str) -> Error {
    Cow::Owned(format!(
        "This is an invalid use of `__EnableUnstableFeatures` because {}",
        message
    ))
}
// Diagnostics for removed or gated constructs: expression-tree splices,
// enum class constants, and the retired `fun()` / `class_meth()` builtins
// (replaced by first-class function references).
pub const splice_outside_et: Error =
    Cow::Borrowed("Splicing can only occur inside expression tree literals (between backticks)");
pub const invalid_enum_class_enumerator: Error = Cow::Borrowed("Invalid enum class constant");
pub const fun_disabled: Error =
    Cow::Borrowed("`fun()` is disabled; switch to first-class references like `foo<>`");
pub const class_meth_disabled: Error =
    Cow::Borrowed("`class_meth()` is disabled; switch to first-class references like `C::bar<>`");
| 46.576844 | 123 | 0.696408 |
f9487594c8f343a67abd5355e390d125924017fa | 1,161 | extern crate tokio;
extern crate whois_rust;
use whois_rust::*;
#[test]
fn test() {
    // WHOIS server definitions come from the bundled node-whois data file.
    let who = WhoIs::from_path("node-whois/servers.json").unwrap();
    // Exercise one lookup each for a domain name, an IPv4 address, and an
    // IPv6 address. The three copy-pasted stanzas are collapsed into a loop;
    // lookup order and printed output are unchanged.
    for &target in &["magiclen.org", "66.42.43.17", "fe80::5400:1ff:feaf:b71"] {
        let result = who.lookup(WhoIsLookupOptions::from_string(target).unwrap()).unwrap();
        println!("{}", result);
    }
}
#[cfg(feature = "tokio")]
#[tokio::test]
async fn test_async() {
    // Async counterpart of `test`: same data file, same three targets
    // (domain, IPv4, IPv6), deduplicated into a single loop.
    let who = WhoIs::from_path_async("node-whois/servers.json").await.unwrap();
    for &target in &["magiclen.org", "66.42.43.17", "fe80::5400:1ff:feaf:b71"] {
        let result = who
            .lookup_async(WhoIsLookupOptions::from_string(target).unwrap())
            .await
            .unwrap();
        println!("{}", result);
    }
}
| 29.025 | 98 | 0.639966 |
#[doc = "⬆\u{fe0f}"]
pub const UP_ARROW: crate::Emoji = crate::Emoji {
glyph: "⬆\u{fe0f}",
codepoint: "2B06 FE0F",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "up arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[crate::Emoji {
glyph: "⬆",
codepoint: "2B06",
status: crate::Status::Unqualified,
introduction_version: 0.6f32,
name: "up arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: true,
variants: &[],
annotations: &[],
}],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("pyl na bo"),
keywords: &["noord", "op", "pyl", "pyl na bo", "rigting"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("ወደ ላይ ጠቋሚ ቀስት"),
keywords: &["ሰሜን", "ቀስት", "አቅጣጫ", "ካርዲናል", "ወደ ላይ ጠቋሚ ቀስት"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهم لأعلى"),
keywords: &["اتجاه", "سهم", "سهم لأعلى", "شمال", "كاردينال"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some("ঊৰ\u{9cd}ধ\u{9cd}বম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}"),
keywords: &[
"উত\u{9cd}তৰ",
"ঊৰ\u{9cd}ধ\u{9cd}বম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
"ক\u{9be}\u{981}ড\u{9bc}",
"দিশ",
"ম\u{9c1}খ\u{9cd}য দিশবিল\u{9be}ক",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("üzüyuxarı ox"),
keywords: &["istiqamət", "ox", "üzüyuxarı ox", "şimal", "əsas"],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка ўверх"),
keywords: &["кірунак", "поўнач", "стрэлка", "стрэлка ўверх", "уверх"],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("стрелка нагоре"),
keywords: &["посока", "север", "стрелка", "стрелка нагоре"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("উপরে তীর"),
keywords: &[
"উত\u{9cd}তর",
"উত\u{9cd}তর দিক",
"উপরে",
"উপরে তীর",
"দিকনির\u{9cd}দেশ",
"পরিম\u{9be}ণব\u{9be}চক",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica nagore"),
keywords: &["sjever", "smjer", "strelica", "strelica nagore"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa cap amunt"),
keywords: &["amunt", "direcció", "fletxa", "fletxa cap amunt", "nord"],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎦᎸᎳᏓᎦᏘ ᎦᏝᏗ"),
keywords: &["ᎤᏴᏢᎢ", "ᎦᎸᎳᏓᎦᏘ ᎦᏝᏗ", "ᎦᏝᏗ", "ᏂᏚᏳᎪᏛᎢ", "ᏧᎵᏍᎨᏓ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipka nahoru"),
keywords: &[
"hlavní",
"sever",
"směr",
"strany",
"světové",
"šipka",
"šipka nahoru",
],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth i fyny"),
keywords: &["Gogledd", "cyfeiriad", "saeth", "saeth i fyny"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("opadvendt pil"),
keywords: &[
"nord",
"opadvendt pil",
"pil",
"pil op",
"retning",
"verdenshjørne",
],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("Pfeil nach oben"),
keywords: &[
"Aufwärtspfeil",
"Norden",
"Pfeil",
"Pfeil nach oben",
"aufwärts",
"nach oben",
],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("πάνω βέλος"),
keywords: &["απόλυτη", "βέλος", "βόρεια", "κατεύθυνση", "πάνω βέλος"],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("up arrow"),
keywords: &["arrow", "cardinal", "direction", "north", "up arrow"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["arrow", "cardinal", "direction", "north", "up"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha hacia arriba"),
keywords: &[
"dirección",
"flecha",
"flecha arriba",
"flecha hacia arriba",
"norte",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &[
"cardinal",
"dirección",
"flecha",
"flecha hacia arriba",
"flecha hacia el norte",
"norte",
],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &[
"cardinal",
"dirección",
"flecha",
"flecha hacia arriba",
"flecha hacia el norte",
"norte",
],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("nool üles"),
keywords: &["nool", "nool üles", "põhi", "suund"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("gora gezia"),
keywords: &["gezi", "gora gezia", "ipar", "kardinal", "norabide"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان بالا"),
keywords: &["جهت اصلی", "شمال", "مسیر", "پیکان", "پیکان بالا"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("nuoli ylös"),
keywords: &[
"ilmansuunta",
"nuoli",
"nuoli ylös",
"pohjoinen",
"pääilmansuunta",
],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("pataas na arrow"),
keywords: &[
"arrow",
"cardinal",
"direksyon",
"hilaga",
"pataas na arrow",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("pílur sum peikar uppeftir"),
keywords: &[
"k´ós",
"norður",
"pílur",
"pílur sum peikar uppeftir",
"ætt",
],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche haut"),
keywords: &["direction", "flèche", "flèche haut", "nord"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("flèche pointant vers le haut"),
keywords: &[
"direction",
"en haut",
"flèche",
"flèche pointant vers le haut",
"nord",
"point cardinal",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead suas"),
keywords: &["príomhaird", "saighead", "suas", "treo", "ó thuaidh"],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead gu tuath"),
keywords: &[
"combaist",
"comhair",
"saighead",
"saighead gu tuath",
"tuath",
"àirde",
],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha cara arriba"),
keywords: &["arriba", "dirección", "frecha cara arriba", "norte"],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("ઉપર તીર"),
keywords: &["ઉત\u{acd}તર", "ઉપર", "તીર", "દિશા", "મ\u{ac1}ખ\u{acd}ય"],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("kibiyar sama"),
keywords: &["arewa", "kibiya", "kibiyar sama", "shiyya", "tsinin shiyya"],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ למעלה"),
keywords: &["חץ", "כיוון", "למעלה", "צפון"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("उर\u{94d}ध\u{94d}वम\u{941}खी तीर"),
keywords: &[
"उत\u{94d}तर दिशा",
"उर\u{94d}ध\u{94d}वम\u{941}खी तीर",
"ऊपर तीर",
"ऊपर तीर, कार\u{94d}डिनल, उत\u{94d}तर दिशा",
"कार\u{94d}डिनल",
"तीर",
"दिशा",
],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("strelica prema gore"),
keywords: &["sjever", "smjer", "strelica", "strelica prema gore"],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("felfelé mutató nyíl"),
keywords: &["felfelé", "felfelé mutató nyíl", "irány", "nyíl", "észak"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("վերև սլաք"),
keywords: &["գլխավոր", "հյուսիս", "ուղղություն", "սլաք", "վերև սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah atas"),
keywords: &["arah", "kardinal", "panah", "tanda panah atas", "utara"],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("ube dị elu"),
keywords: &["kadịnal", "nduzị", "ube", "ube dị elu", "ugwu"],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("ör upp"),
keywords: &["höfuðátt", "norður", "átt", "ör", "ör upp"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia rivolta verso l’alto"),
keywords: &[
"direzione",
"freccia",
"freccia in alto",
"freccia rivolta verso l’alto",
"nord",
"punto cardinale",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("上矢印"),
keywords: &["上", "上矢印", "北", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah munggah"),
keywords: &["arah", "kardinal", "lor", "panah", "panah munggah"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი ზემოთ"),
keywords: &[
"ისარი",
"ისარი ზემოთ",
"კარდინალური",
"მიმართულება",
"ჩრდილოეთი",
],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("aneccab d asawen"),
keywords: &["aneccab d asawen"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("жоғары қараған көрсеткі"),
keywords: &[
"бағыт",
"жоғары бағытты көрсеткі",
"жоғары қараған көрсеткі",
"көрсеткі",
"негізгі",
"солтүстік",
],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("opadvendt pil"),
keywords: &["nord", "opadvendt pil", "pil", "retning", "verdenshjørne"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some("ព\u{17d2}រ\u{17bd}ញទៅលើ"),
keywords: &[
"ជើង",
"ទ\u{17b7}ស",
"ទៅលើ",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញទៅលើ",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಮೇಲ\u{ccd}ಮುಖ ಬಾಣ"),
keywords: &[
"ಉತ\u{ccd}ತರ",
"ಕಾರ\u{ccd}ಡ\u{cbf}ನಲ\u{ccd}\u{200c}",
"ದ\u{cbf}ಕ\u{ccd}ಕು",
"ಬಾಣ",
"ಮೇಲ\u{ccd}ಮುಖ ಬಾಣ",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("상향 화살표"),
keywords: &["북쪽", "상향 화살표", "위쪽", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("वयर बाण"),
keywords: &["उत\u{94d}तर", "कार\u{94d}डिनल", "दिशा", "बाण", "वयर बाण"],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("өйдө караган жебе"),
keywords: &["багыт", "жебе", "түндүк", "өйдө караган жебе"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("Feil no uewen"),
keywords: &[
"Feil",
"Feil no uewen",
"Himmelsrichtung",
"Norden",
"Richtung",
],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນຂ\u{eb6}\u{ec9}ນ"),
keywords: &[
"ຂ\u{eb6}\u{ec9}ນ",
"ທ\u{eb4}ດທາງ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນຂ\u{eb6}\u{ec9}ນ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė į viršų"),
keywords: &[
"koordinatė",
"kryptis",
"rodyklė",
"rodyklė į viršų",
"šiaurė",
],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("augšupbultiņa"),
keywords: &[
"augšupbultiņa",
"augšupvērsta bultiņa",
"bultiņa",
"norāde",
"ziemeļi",
],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere ake"),
keywords: &["ahunga", "matua", "pere", "pere ake", "raki"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("стрелка нагоре"),
keywords: &["горе", "север", "стрелка", "стрелка нагоре"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some(
"മ\u{d41}കളിലേക\u{d4d}ക\u{d4d} ച\u{d42}ണ\u{d4d}ട\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ക\u{d3e}ർഡിനൽ",
"ദിശ",
"മ\u{d41}കളിലേക\u{d4d}ക\u{d4d} ച\u{d42}ണ\u{d4d}ട\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
"വടക\u{d4d}ക\u{d4d}",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("дээшээ сум"),
keywords: &["дээшээ сум", "кардинал", "сум", "хойд", "чиглэл"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("वर दर\u{94d}शविणारा बाण"),
keywords: &[
"उत\u{94d}तर",
"दिशा",
"बाण",
"महत\u{94d}वाच\u{947}",
"वर दर\u{94d}शविणारा बाण",
],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah ke atas"),
keywords: &[
"anak panah",
"anak panah ke atas",
"arah",
"kardinal",
"utara",
],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa ’l fuq"),
keywords: &[
"direzzjoni",
"kardinal",
"tramuntana",
"vleġġa",
"vleġġa ’l fuq",
],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("အပေါ\u{103a}ည\u{103d}\u{103e}န\u{103a}မြား"),
keywords: &[
"မြား",
"မြောက\u{103a}အရပ\u{103a} သင\u{103a}\u{1039}ကေတ",
"လမ\u{103a}းည\u{103d}\u{103e}န\u{103a}",
"အပေါ\u{103a}ည\u{103d}\u{103e}န\u{103a}မြား",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("pil opp"),
keywords: &["nord", "oppoverpil", "pil", "pil opp", "retning"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("माथि फर\u{94d}क\u{947}को तीर"),
keywords: &[
"उत\u{94d}तर",
"कार\u{94d}डिनल",
"तीर",
"दिशा",
"माथि फर\u{94d}क\u{947}को तीर",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("pijl omhoog"),
keywords: &["noord", "pijl", "pijl omhoog", "richting", "windrichting"],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("pil opp"),
keywords: &["nord", "oppoverpil", "pil", "pil opp", "retning"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ଉପର ତୀର"),
keywords: &[
"ଉପର ତୀର",
"ତୀର",
"ଦ\u{b3f}ଗ",
"ପ\u{b42}ର\u{b4d}ବ",
"ପ\u{b4d}ରଧ\u{b3e}ନ",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਉ\u{a71}ਪਰ ਤੀਰ"),
keywords: &[
"ਉ\u{a71}ਤਰ",
"ਉ\u{a71}ਪਰ ਤੀਰ",
"ਕਾਰਡੀਨਲ",
"ਤੀਰ",
"ਦਿਸ\u{a3c}ਾ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("ا\u{64f}وتلا تیر"),
keywords: &["ا\u{64f}وتلا تیر", "اہم", "تیر", "سمت", "شمال"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Áro De Pọínt Ọp"),
keywords: &[
"Dairẹ\u{301}kshọn",
"Kọ\u{301}mpas",
"Nọt",
"Áro",
"Áro De Pọínt Ọp",
],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka w górę"),
keywords: &[
"kierunek",
"północ",
"strzałka",
"strzałka do góry",
"strzałka w górę",
],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("پورته غشی"),
keywords: &["اساسي", "اړخ", "شمال", "غشی", "پورته غشی"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta para cima"),
keywords: &["cardinal", "direção", "norte", "seta", "seta para cima"],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("↑↑↑"),
keywords: &["cardeal", "direção", "norte", "seta", "seta para cima"],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("hanaq wach’i"),
keywords: &["hanaq wach’i"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată orientată în sus"),
keywords: &[
"cardinal",
"direcție",
"nord",
"săgeată",
"săgeată orientată în sus",
],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-157"),
keywords: &["E10-157"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка вверх"),
keywords: &["вверх", "направление", "север", "стрелка"],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("akambi kazamuka"),
keywords: &[
"akambi kazamuka",
"amajyaruguru",
"icyerekezo",
"kimwe mu byerekezo bine",
"umwambi",
],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("مٿي تير"),
keywords: &["اتر", "تير", "طرف", "مٿي تير", "ڪارڊينل"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("උඩ\u{dd4} ඊතලය"),
keywords: &[
"ඊතලය",
"උඩ\u{dd4} ඊතලය",
"උත\u{dd4}ර",
"ක\u{dcf}ඩ\u{dd2}නල\u{dca}",
"ද\u{dd2}ශ\u{dcf}ව",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka nahor"),
keywords: &["hore", "nahor", "sever", "šípka"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica gor"),
keywords: &["glavno", "puščica", "puščica gor", "sever", "smer"],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("fallaarta kor"),
keywords: &[
"afarta jiho",
"falaar",
"falaarta ko",
"fallaarta kor",
"jiho",
"waqooyi",
],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjeta lart"),
keywords: &["drejtim", "kryesore", "shigjeta lart", "shigjetë", "veri"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица нагоре"),
keywords: &["сeвeр", "смер", "стрeлицa", "стрелица нагоре"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["стрелица нагоре"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica nagore"),
keywords: &["sever", "smer", "strelica", "strelica nagore"],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: None,
keywords: &["strelica nagore"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("uppåtpil"),
keywords: &["norr", "pil", "riktning", "uppåtpil", "väderstreck"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale unaoelekeza juu"),
keywords: &[
"kaskazini",
"mshale",
"mshale unaoelekeza juu",
"sehemu kuu ya dira",
"uelekeo",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("மேல\u{bcd}நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
keywords: &[
"அம\u{bcd}புக\u{bcd}குறி",
"க\u{bbe}ர\u{bcd}டினல\u{bcd}",
"திசை",
"மேல\u{bcd}",
"மேல\u{bcd}நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
"வடக\u{bcd}கு",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("ఎగువ బ\u{c3e}ణం"),
keywords: &[
"ఉత\u{c4d}తరం",
"ఎగువ బ\u{c3e}ణం",
"క\u{c3e}ర\u{c4d}డ\u{c3f}నల\u{c4d}",
"ద\u{c3f}శ",
"బ\u{c3e}ణం",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири \"боло\""),
keywords: &["асосӣ", "самт", "тир", "тири \"боло\"", "шимол"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ข\u{e36}\u{e49}น"),
keywords: &[
"ท\u{e34}ศทาง",
"ล\u{e39}กศร",
"ล\u{e39}กศรช\u{e35}\u{e49}ข\u{e36}\u{e49}น",
"เหน\u{e37}อ",
],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ምልክት ናብ ላዕሊ"),
keywords: &["ምልክት", "ምልክት ናብ ላዕሊ", "ሰሜን", "ኣንፈት", "ካርዲናል"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("ýokary ok"),
keywords: &["demirgazyk", "göni", "ok", "ugur", "ýokary ok"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau ki ʻolunga"),
keywords: &["hake", "ngahau", "ngahau ki ʻolunga", "ʻolunga"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("yukarı ok"),
keywords: &[
"ana yön",
"kuzey",
"ok",
"yukarı ok",
"yukarı yönlü ok",
"yön",
],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("ئۈستى كۆرسەتكۈچ"),
keywords: &["ئاساس", "ئۈستى كۆرسەتكۈچ", "شىمال", "كۆرسەتكۈچ", "يۆنىلىش"],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка вгору"),
keywords: &[
"на північ",
"напрям",
"сторона світу",
"стрілка",
"стрілка вгору",
],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("اوپر کا تیر"),
keywords: &["اوپر کا تیر", "تیر", "سمت", "شمال", "کارڈینل"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("yuqoriga strelka"),
keywords: &["shimol", "strelka", "yo‘nalish", "yuqoriga strelka"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên lên"),
keywords: &["bắc", "chính", "hướng", "mũi tên", "mũi tên lên"],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fettu kaw"),
keywords: &["fett", "fettu kaw", "gànnaar", "jubluwaay", "kàrdinal"],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo oluphezulu"),
keywords: &[
"emntla",
"ukhardinale",
"ulwalathiso",
"utolo",
"utolo oluphezulu",
],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("àmì òkè ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà"),
keywords: &[
"kádínàlì",
"àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
"àmì òkè ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
"àríwá",
"ìtọ\u{301}sọ\u{301}nà",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("向上箭咀"),
keywords: &["北", "向上箭咀", "基點", "方向", "箭咀"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("向上箭咀"),
keywords: &["北", "向上箭咀", "基点", "方向", "箭咀"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("向上箭头"),
keywords: &["北", "向上箭头", "方向", "标识"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("向上箭頭"),
keywords: &["向上箭頭", "方向"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("上箭嘴"),
keywords: &["上箭嘴", "上箭嘴按鈕", "方向"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("inkomba phezulu"),
keywords: &["enyakatho", "inkomba", "inkomba phezulu", "umcibisholo"],
},
],
};
#[doc = "↗\u{fe0f}"]
pub const UP_RIGHT_ARROW: crate::Emoji = crate::Emoji {
glyph: "↗\u{fe0f}",
codepoint: "2197 FE0F",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "up-right arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[crate::Emoji {
glyph: "↗",
codepoint: "2197",
status: crate::Status::Unqualified,
introduction_version: 0.6f32,
name: "up-right arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: true,
variants: &[],
annotations: &[],
}],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("pyl na regs bo"),
keywords: &["noordoos", "pyl", "pyl na regs bo", "rigting"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("ወደ ላይ ቀኝ ጠቋሚ ቀስት"),
keywords: &["ሰሜን ምሥራቅ", "ቀስት", "አቅጣጫ", "ኢንተርካርዲናል", "ወደ ላይ ቀኝ ጠቋሚ ቀስት"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهم لأعلى اليمين"),
keywords: &["اتجاه", "سهم", "سهم لأعلى اليمين", "شمال شرق"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some(
"সো\u{981}দিশে ঢ\u{9be}ল খোৱ\u{9be} ঊৰ\u{9cd}ধ\u{9cd}বম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
),
keywords: &[
"আন\u{9cd}তঃদিশ",
"উত\u{9cd}তৰ-প\u{9c2}ব",
"ক\u{9be}\u{981}ড\u{9bc}",
"দিশ",
"সো\u{981}দিশে ঢ\u{9be}ল খোৱ\u{9be} ঊৰ\u{9cd}ধ\u{9cd}বম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("üzüyuxarı sağa yönəlmiş ox"),
keywords: &[
"interkardinal",
"istiqamət",
"ox",
"üzüyuxarı sağa yönəlmiş ox",
"şimal şərq",
],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка ўверх-управа"),
keywords: &[
"кірунак",
"паўночны ўсход",
"стрэлка",
"стрэлка ўверх-управа",
"уверх-управа",
],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("стрелка нагоре и надясно"),
keywords: &[
"посока",
"североизток",
"стрелка",
"стрелка нагоре и надясно",
],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("উপরে ড\u{9be}নে তীর"),
keywords: &[
"আন\u{9cd}তঃ দিগনির\u{9cd}ণয\u{9bc}",
"উত\u{9cd}তর-প\u{9c2}র\u{9cd}ব",
"উপরে ড\u{9be}নে তীর",
"তীর",
"দিক",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica gore-desno"),
keywords: &["sjeveroistok", "smjer", "strelica", "strelica gore-desno"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa cap amunt a la dreta"),
keywords: &[
"amunt a la dreta",
"direcció",
"fletxa",
"fletxa cap amunt a la dreta",
"nord-est",
],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎦᎸᎳᏗ-ᎠᎦᏘᏏ ᎦᏝᏗ"),
keywords: &[
"ᎤᏴᏝᎧᎸᎬᎢ",
"ᎦᎸᎳᏗ-ᎠᎦᏘᏏ ᎦᏝᏗ",
"ᎦᏝᏗ",
"ᏂᏚᏳᎪᏛᎢ",
"ᏅᎩᏂᏚᏳᎪᏛᎢ ᏂᏚᏓᎸᏗᏎᎯᎯ",
],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipka doprava nahoru"),
keywords: &[
"severovýchod",
"směr",
"strany",
"světové",
"vedlejší",
"šipka",
"šipka doprava nahoru",
],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth i fyny-dde"),
keywords: &["Gogledd-ddwyrain", "cyfeiriad", "saeth", "saeth i fyny-dde"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("opadvendt pil mod højre"),
keywords: &["nordøst", "opadvendt pil mod højre", "pil", "retning"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("Pfeil nach rechts oben"),
keywords: &[
"Nordosten",
"Pfeil",
"Pfeil nach rechts oben",
"nach rechts oben",
],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("πάνω δεξιό βέλος"),
keywords: &[
"βέλος",
"βορειοανατολικά",
"ενδιάμεση",
"κατεύθυνση",
"πάνω δεξιό βέλος",
],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("up-right arrow"),
keywords: &[
"arrow",
"direction",
"intercardinal",
"northeast",
"up-right arrow",
],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha hacia la esquina superior derecha"),
keywords: &[
"arriba",
"derecha",
"dirección",
"flecha",
"flecha hacia la esquina superior derecha",
"noreste",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &[
"derecha",
"dirección",
"flecha",
"flecha hacia el noreste",
"flecha hacia la esquina superior derecha",
"noreste",
],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("nool üles paremale"),
keywords: &["kirre", "nool", "nool üles paremale", "suund"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("gora eta eskuinera gezia"),
keywords: &[
"gezi",
"gora eta eskuinera gezia",
"ipar-ekialde",
"kardinal arteko",
"norabide",
],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان بالا راست"),
keywords: &["جهت فرعی", "شمال شرقی", "مسیر", "پیکان", "پیکان بالا راست"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("nuoli yläoikealle"),
keywords: &[
"ilmansuunta",
"koillinen",
"nuoli",
"nuoli yläoikealle",
"väli-ilmansuunta",
],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("pataas na pakanan na arrow"),
keywords: &[
"arrow",
"direksyon",
"hilagang-silangan",
"intercardinal",
"pakanan",
"pataas",
"pataas na pakanan na arrow",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("pílur sum peikar uppeftir og til høgru"),
keywords: &[
"k´ós",
"landnyrðingur",
"pílur",
"pílur sum peikar uppeftir og til høgru",
"ætt",
],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche haut droite"),
keywords: &["direction", "flèche", "flèche haut droite", "nord-est"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("flèche pointant vers le haut à droite"),
keywords: &[
"direction",
"en haut à droite",
"flèche",
"flèche pointant vers le haut à droite",
"nord-est",
"point intercardinal",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead suas ar dheis"),
keywords: &[
"idirmheánach",
"saighead",
"saighead suas ar dheis",
"soir ó thuaidh",
"treo",
],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead gun ear-thuath"),
keywords: &[
"combaist",
"comhair",
"ear-thuath",
"saighead",
"saighead gun ear-thuath",
"àirde",
],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha cara arriba á dereita"),
keywords: &[
"arriba",
"dirección",
"frecha",
"frecha cara arriba á dereita",
"nordeste",
],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("ઉપર-જમણ\u{ac1}\u{a82} તીર"),
keywords: &[
"ઉત\u{acd}તરપ\u{ac2}ર\u{acd}વ",
"ઉપર-જમણ\u{ac1}\u{a82} તીર",
"તીર",
"દિશા",
],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("kibiyar sama ta dama"),
keywords: &[
"a ƙetaren tsini shiyya",
"arewa maso gabas",
"kibiya",
"kibiyar sama ta dama",
"shiyya",
],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ למעלה וימינה"),
keywords: &["חץ", "חץ למעלה וימינה", "כיוון", "צפון-מזרח"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("ऊपर-दाया\u{901} तीर"),
keywords: &[
"इ\u{902}टरकार\u{94d}डिनल",
"उत\u{94d}तर-प\u{942}र\u{94d}व दिशा",
"ऊपर-दाया\u{901} तीर",
],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("strelica prema gore-desno"),
keywords: &[
"sjeveroistok",
"smjer",
"strelica",
"strelica prema gore-desno",
],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("jobbra felfelé mutató nyíl"),
keywords: &[
"felfelé",
"irány",
"jobbra felfelé mutató nyíl",
"nyíl",
"északkelet",
],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("վերև աջ սլաք"),
keywords: &["հյուսիս-արևելք", "ուղղություն", "սլաք", "վերև աջ սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah kanan atas"),
keywords: &[
"arah",
"interkardinal",
"panah",
"tanda panah kanan atas",
"timur laut",
],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("ube akanri dị elu"),
keywords: &[
"intakadịnal",
"nduzị",
"ube",
"ube akanri dị elu",
"ugwuọwụwa anyanwụ",
],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("ör ská upp til hægri"),
keywords: &["norðaustur", "átt", "ör", "ör ská upp til hægri"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia rivolta verso destra che punta in alto"),
keywords: &[
"direzione",
"freccia",
"freccia in alto a destra",
"freccia rivolta verso destra che punta in alto",
"nord-est",
"punto intercardinale",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("右上矢印"),
keywords: &["北東", "右上", "右上矢印", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah munggah-nengen"),
keywords: &[
"arah",
"interkardinal",
"lor-wetan",
"panah",
"panah munggah-nengen",
],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი ზემოთ და მარჯვნივ"),
keywords: &[
"ინტერკარდინალური",
"ისარი",
"ისარი ზემოთ და მარჯვნივ",
"კარდინალური",
"მიმართულება",
"ჩრდილო-აღმოსავლეთი",
],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("aneccab d asawen uẓẓil"),
keywords: &["aneccab d asawen uẓẓil"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("жоғарғы оң жақ көрсеткісі"),
keywords: &[
"бағыт",
"жоғарғы оң жақ көрсеткісі",
"көрсеткі",
"румба аралық",
"солтүстік-шығыс",
],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("opadvendt pil mod højre"),
keywords: &["nordøst", "opadvendt pil mod højre", "pil", "retning"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some(
"ព\u{17d2}រ\u{17bd}ញទៅលើងាកទៅស\u{17d2}តា\u{17c6}ក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
),
keywords: &[
"ទ\u{17b7}ស",
"ទ\u{17b7}សដៅ",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញទៅលើងាកទៅស\u{17d2}តា\u{17c6}ក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಮೇಲ\u{cbf}ನ ಬಾಲ ಬಾಣ"),
keywords: &[
"ಈಶಾನ\u{ccd}ಯ",
"ದ\u{cbf}ಕ\u{ccd}ಕು",
"ನ\u{cbf}ರ\u{ccd}ದೇಶನ",
"ಬಾಣದ ಗುರುತು",
"ಮೇಲ\u{cbf}ನ ಬಾಲ ಬಾಣ",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("우상향 화살표"),
keywords: &["북동쪽", "우상향 화살표", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("वयर-उजवो बाण"),
keywords: &["ईशान\u{94d}य", "उपदिशा", "दिशा", "बाण", "वयर-उजवो बाण"],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("өйдө оң жакты караган жебе"),
keywords: &[
"багыт",
"жебе",
"түндүк-чыгыш",
"өйдө оң жакты караган жебе",
],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("Feil no uewen a riets"),
keywords: &[
"Feil",
"Feil no uewen a riets",
"Himmelsrichtung",
"Nordosten",
"Richtung",
],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນຂ\u{eb6}\u{ec9}ນມ\u{eb8}ມຂວາ"),
keywords: &[
"ຂ\u{eb6}\u{ec9}ນ",
"ທ\u{eb4}ດທາງ",
"ມ\u{eb8}ມຂວາ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນຂ\u{eb6}\u{ec9}ນມ\u{eb8}ມຂວາ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė į viršų ir į dešinę"),
keywords: &[
"kryptis",
"rodyklė",
"rodyklė į viršų ir į dešinę",
"šiaurės rytai",
],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("uz augšējo labo stūri vērsta bultiņa"),
keywords: &[
"bultiņa",
"norāde",
"uz augšējo labo stūri vērsta bultiņa",
"ziemeļaustrumi",
],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere ake-matau"),
keywords: &[
"ahunga",
"pere",
"pere ake-matau",
"rāwhiti-mā-raki",
"waenga-matua",
],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("стрелка нагоре десно"),
keywords: &["горе", "североисток", "стрелка", "стрелка нагоре десно"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some("മ\u{d41}കളിൽ വലത\u{d4d}തേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം"),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ഇന\u{d4d}റർക\u{d3e}ർഡിനൽ",
"ദിശ",
"നോർത\u{d4d}ത\u{d4d} ഈസ\u{d4d}\u{200c}റ\u{d4d}റ\u{d4d}",
"മ\u{d41}കളിൽ വലത\u{d4d}തേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
"വടക\u{d4d}ക\u{d4d} കിഴക\u{d4d}കൻ",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("баруун дээшээ сум"),
keywords: &["баруун дээшээ сум", "зүүн хойд", "сум", "чиглэл"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("वर उजवीकड\u{947} दर\u{94d}शविणारा बाण"),
keywords: &[
"अ\u{902}तर\u{94d}गत महत\u{94d}वाच\u{947}",
"उत\u{94d}तरप\u{942}र\u{94d}व",
"दिशा",
"बाण",
"वर उजवीकड\u{947} दर\u{94d}शविणारा बाण",
],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah penjuru atas kanan"),
keywords: &[
"anak panah",
"anak panah penjuru atas kanan",
"antara kardinal",
"arah",
"timur laut",
],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa ’l fuq leminija"),
keywords: &[
"direzzjoni",
"grigal",
"interkardinal",
"vleġġa",
"vleġġa ’l fuq leminija",
],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ညာဘက\u{103a}အပေါ\u{103a}ည\u{103d}\u{103e}န\u{103a} မြား"),
keywords: &[
"ညာဘက\u{103a} အပေါ\u{103a}ည\u{103d}\u{103e}န\u{103a}မြား",
"ညာဘက\u{103a}အပေါ\u{103a}ည\u{103d}\u{103e}န\u{103a} မြား",
"ဌာနတ\u{103d}င\u{103a}း အရေးအကြ\u{102e}းဆ\u{102f}\u{1036}း",
"မြား",
"လမ\u{103a}းည\u{103d}\u{103e}န\u{103a}",
"အရ\u{103e}ေ\u{1037}မြောက\u{103a}",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("pil opp-høyre"),
keywords: &["nordøst", "pil", "pil opp-høyre", "retning"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("माथि-दाया\u{901} फर\u{94d}क\u{947}को तीर"),
keywords: &[
"उत\u{94d}तरदक\u{94d}षीण",
"कार\u{94d}डिनल विपरित",
"तीर",
"दिशा",
"माथि-दाया\u{901} फर\u{94d}क\u{947}को तीर",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("pijl rechtsomhoog"),
keywords: &[
"noordoost",
"pijl",
"pijl rechtsomhoog",
"richting",
"windrichting",
],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("pil opp-høgre"),
keywords: &["nordaust", "pil", "pil opp-høgre", "retning"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ଉପର-ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର"),
keywords: &[
"ଅନ\u{b4d}ତଃପ\u{b4d}ରଧ\u{b3e}ନ",
"ଉତ\u{b4d}ତରପ\u{b42}ର\u{b4d}ବ",
"ଉପର-ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର",
"ତୀର",
"ଦ\u{b3f}ଗ",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਉ\u{a71}ਪਰ-ਸ\u{a71}ਜ\u{a47} ਤੀਰ"),
keywords: &[
"ਇ\u{a70}ਟਰਕਾਰਡੀਨਲ",
"ਉ\u{a71}ਤਰ-ਪ\u{a42}ਰਬ",
"ਉ\u{a71}ਪਰ-ਸ\u{a71}ਜ\u{a47} ਤੀਰ",
"ਤੀਰ",
"ਦਿਸ\u{a3c}ਾ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("ا\u{64f}وتلا-سجا تیر"),
keywords: &[
"انٹر کارڈینل",
"ا\u{64f}وتلا-سجا تیر",
"تیر",
"سمت",
"شمال مشرق",
],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Áro De Pọínt Ọ\u{301}p-Rait"),
keywords: &[
"Dairẹ\u{301}kshọn",
"Intakádínal",
"Nọ\u{301}tist",
"Áro",
"Áro De Pọínt Ọ\u{301}p-Rait",
],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka w górę w prawo"),
keywords: &[
"kierunek",
"północny wschód",
"strzałka",
"strzałka do góry w prawo",
"strzałka w górę w prawo",
],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("پورته ښي غشی"),
keywords: &["انتر کارډينل", "اړخ", "شمال ختيځ", "غشی", "پورته ښي غشی"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta para cima e para a direita"),
keywords: &[
"direção",
"intercardinal",
"nordeste",
"seta",
"seta para cima e para a direita",
],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("seta para cima e para a direita"),
keywords: &[
"direção",
"intermédio",
"nordeste",
"seta",
"seta para cima e para a direita",
],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("hanaq paña wach’i"),
keywords: &["hanaq paña wach’i"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată orientată în dreapta-sus"),
keywords: &[
"direcție",
"intercardinal",
"nord-est",
"săgeată",
"săgeată orientată în dreapta-sus",
],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-009"),
keywords: &["E10-009"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка вправо-вверх"),
keywords: &["вправо-вверх", "направление", "северо-восток", "стрелка"],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("umwambi w’ahagana ejuru iburyo"),
keywords: &[
"amajyaruguru ashyira iburasirazuba",
"icyerekezo",
"kiri hagati y’ibyerekezo bine",
"umwambi",
"umwambi w’ahagana ejuru iburyo",
],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("مٿي ساڄي تير"),
keywords: &["اتراوڀر", "انٽرڪارڊينل", "تير", "طرف", "مٿي ساڄي تير"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("උඩ\u{dd4}-දක\u{dd4}ණ\u{dd4} ඊතලය"),
keywords: &[
"ඉන\u{dca}ටර\u{dca}ක\u{dcf}ඩ\u{dd2}නල\u{dca}",
"ඊතලය",
"ඊස\u{dcf}න",
"උඩ\u{dd4}-දක\u{dd4}ණ\u{dd4} ඊතලය",
"ද\u{dd2}ශ\u{dcf}ව",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka doprava nahor"),
keywords: &[
"doprava nahor",
"severovýchod",
"šípka",
"šípka doprava nahor",
],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica desno gor"),
keywords: &[
"puščica",
"puščica desno gor",
"severovzhod",
"smer",
"stransko",
],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("fallaarta kor ee midig"),
keywords: &[
"fallaar",
"fallaarta kor ee midig",
"isdhaafsiga jihooyinka",
"jihada",
"waqooyi bari",
],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjeta lart-djathtas"),
keywords: &[
"drejtim",
"i ndërmjetëm",
"shigjeta lart-djathtas",
"shigjetë",
"verilindje",
],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица нагоре и надесно"),
keywords: &[
"сeвeрoистoк",
"смер",
"стрeлицa",
"стрелица нагоре и надесно",
],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["стрелица нагоре и надесно"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica nagore i nadesno"),
keywords: &[
"severoistok",
"smer",
"strelica",
"strelica nagore i nadesno",
],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: None,
keywords: &["strelica nagore i nadesno"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("uppåtpil höger"),
keywords: &["höger", "pil", "uppåt", "uppåtpil höger"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale unaoelekeza juu kulia"),
keywords: &[
"kaskazini mashariki",
"kati ya sehemu kuu ya dira",
"mshale",
"mshale unaoelekeza juu kulia",
"uelekeo",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("மேல\u{bcd}-வலது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
keywords: &[
"திசை",
"மேல\u{bcd}-வலது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
"வட கிழக\u{bcd}கு",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("ఎగువ కుడ\u{c3f} మూల బ\u{c3e}ణం"),
keywords: &[
"ఈశ\u{c3e}న\u{c4d}యం",
"ఎగువ కుడ\u{c3f} మూల బ\u{c3e}ణం",
"ద\u{c3f}శ",
"బ\u{c3e}ణం",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири болову рост"),
keywords: &["мобайнӣ", "самт", "тир", "тири болову рост", "шимолу шарқ"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ม\u{e38}มขวาบน"),
keywords: &[
"ตะว\u{e31}นออกเฉ\u{e35}ยงเหน\u{e37}อ",
"ท\u{e34}ศทาง",
"ล\u{e39}กศรช\u{e35}\u{e49}ม\u{e38}มขวาบน",
"ล\u{e39}\u{e39}กศร",
],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ምልክት ናብ-የማናይ ላዕሊ"),
keywords: &[
"ምልክት",
"ምልክት ናብ-የማናይ ላዕሊ",
"ሰሜናዊ ምብራቅ",
"ኣንፈት",
"ውሽጣዊ ናይ ካርዲናል",
],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("ýokary-saga ok"),
keywords: &[
"demirgazyk-gündogar",
"gytak",
"ok",
"ugur",
"ýokary-saga ok",
],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau ki ʻolunga toʻomataʻu"),
keywords: &[
"mataʻu",
"ngahau",
"ngahau ki ʻolunga toʻomataʻu",
"ʻolunga",
],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("sağ yukarı ok"),
keywords: &["ara yön", "kuzey doğu", "ok", "sağ yukarı ok", "yön"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("ئۈستى-ئوڭ كۆرسەتكۈچ"),
keywords: &[
"ئارا يۆنىلىشلىك",
"ئۈستى-ئوڭ كۆرسەتكۈچ",
"شەرقىي-شىمال",
"كۆرسەتكۈچ",
"يۆنىلىش",
],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка вгору вправо"),
keywords: &[
"на північний схід",
"напрям",
"стрілка",
"стрілка вгору вправо",
"четвертинний румб",
],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("اوپر دائیں تیر"),
keywords: &["اوپر دائیں تیر", "تیر", "سائن", "سمت", "شمال مشرق"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("o‘ng-yuqoriga strelka"),
keywords: &[
"chiziq",
"o‘ng-yuqoriga strelka",
"shimoli-sharq",
"yo‘nalish",
],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên lên bên phải"),
keywords: &[
"hướng",
"mũi tên",
"mũi tên lên bên phải",
"nhiều hướng",
"đông bắc",
],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fettu ndijooru kaw"),
keywords: &[
"fett",
"fettu ndijooru kaw",
"interkàrdinal",
"jubluwaay",
"penku-gànnaar",
],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo olujonge phezulu"),
keywords: &[
"empuma-mzantsi",
"ukhardinale",
"ulwalathiso",
"utolo",
"utolo olujonge phezulu",
],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("àmì òkè ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún"),
keywords: &[
"ilà oòrùn àríwá",
"àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
"àmì òkè ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún",
"ìlọ\u{301}pọ\u{300} kádínàlì",
"ìtọ\u{301}sọ\u{301}nà",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("向右上箭咀"),
keywords: &["向右上箭咀", "基點間", "方向", "東北", "箭咀"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("向右上箭咀"),
keywords: &["东北", "向右上箭咀", "基点间", "方向", "箭咀"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("右上箭头"),
keywords: &["东北", "右上箭头", "方向", "标识"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("右上箭頭"),
keywords: &["右上箭頭", "方向"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("上右箭嘴"),
keywords: &["上右箭嘴", "方向"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("Inkomba phezulu-kwesokudla"),
keywords: &[
"Inkomba phezulu-kwesokudla",
"enyakathompumalanga",
"inkomba",
"inkombakuhlanganisa",
"umcibisholo",
],
},
],
};
// Generated emoji metadata record for U+27A1 U+FE0F "right arrow" (➡️).
// The `glyph` carries U+FE0F (VARIATION SELECTOR-16) to request emoji
// presentation; the single entry in `variants` is the unqualified form
// without FE0F. Each `annotations` element is a per-locale name/keyword
// record, compiled in only when the matching locale feature is enabled.
// NOTE(review): this table appears to be codegen from Unicode CLDR
// annotation data — edit the generator/upstream data, not these literals.
// NOTE(review): several locales carry "↑↑↑" as tts/keywords — presumably
// CLDR's explicit-inheritance placeholder rather than real translations;
// confirm against the upstream CLDR annotation files before relying on it.
#[doc = "➡\u{fe0f}"]
pub const RIGHT_ARROW: crate::Emoji = crate::Emoji {
    glyph: "➡\u{fe0f}",
    codepoint: "27A1 FE0F",
    status: crate::Status::FullyQualified,
    introduction_version: 0.6f32,
    name: "right arrow",
    group: "Symbols",
    subgroup: "arrow",
    is_variant: false,
    // Unqualified variant: same character without the FE0F selector.
    variants: &[crate::Emoji {
        glyph: "➡",
        codepoint: "27A1",
        status: crate::Status::Unqualified,
        introduction_version: 0.6f32,
        name: "right arrow",
        group: "Symbols",
        subgroup: "arrow",
        is_variant: true,
        variants: &[],
        annotations: &[],
    }],
    // Per-locale annotations, feature-gated so unused locales add no weight.
    annotations: &[
        #[cfg(feature = "af")]
        crate::Annotation {
            lang: "af",
            tts: Some("pyl na regs"),
            keywords: &["oos", "pyl", "pyl na regs", "regs", "rigting"],
        },
        #[cfg(feature = "am")]
        crate::Annotation {
            lang: "am",
            tts: Some("ወደ ቀኝ ጠቋሚ ቀስት"),
            keywords: &["ምሥራቅ", "ቀስት", "አቅጣጫ", "ካርዲናል", "ወደ ቀኝ ጠቋሚ ቀስት"],
        },
        #[cfg(feature = "ar")]
        crate::Annotation {
            lang: "ar",
            tts: Some("سهم لليمين"),
            keywords: &["اتجاه", "سهم", "سهم لليمين", "شرق", "كاردينال"],
        },
        #[cfg(feature = "as")]
        crate::Annotation {
            lang: "as",
            tts: Some("সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}"),
            keywords: &[
                "ক\u{9be}\u{981}ড\u{9bc}",
                "দিশ",
                "প\u{9c2}ব",
                "ম\u{9c1}খ\u{9cd}য দিশবিল\u{9be}ক",
                "সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
            ],
        },
        #[cfg(feature = "az")]
        crate::Annotation {
            lang: "az",
            tts: Some("sağa ox"),
            keywords: &["istiqamət", "ox", "sağa ox", "şərq", "əsas"],
        },
        #[cfg(feature = "be")]
        crate::Annotation {
            lang: "be",
            tts: Some("стрэлка ўправа"),
            keywords: &["кірунак", "стрэлка", "стрэлка ўправа", "управа", "усход"],
        },
        #[cfg(feature = "bg")]
        crate::Annotation {
            lang: "bg",
            tts: Some("стрелка надясно"),
            keywords: &[
                "Стрелка надясно",
                "изток",
                "посока",
                "стрелка",
                "стрелка надясно",
            ],
        },
        #[cfg(feature = "bn")]
        crate::Annotation {
            lang: "bn",
            tts: Some("ড\u{9be}নদিকের তীর"),
            keywords: &[
                "ড\u{9be}ন",
                "ড\u{9be}নদিকের তীর",
                "তীর",
                "দিকনির\u{9cd}দেশ",
                "পরিম\u{9be}ণব\u{9be}চক",
                "প\u{9c2}র\u{9cd}ব",
            ],
        },
        #[cfg(feature = "bs")]
        crate::Annotation {
            lang: "bs",
            tts: Some("strelica nadesno"),
            keywords: &["istok", "smjer", "strelica", "strelica nadesno"],
        },
        #[cfg(feature = "ca")]
        crate::Annotation {
            lang: "ca",
            tts: Some("fletxa cap a la dreta"),
            keywords: &[
                "direcció",
                "dreta",
                "est",
                "fletxa",
                "fletxa cap a la dreta",
            ],
        },
        #[cfg(feature = "chr")]
        crate::Annotation {
            lang: "chr",
            tts: Some("ᎠᎦᏘᏏ ᎦᏝᏗ"),
            keywords: &["ᎠᎦᏘᏏ ᎦᏝᏗ", "ᎦᏝᏗ", "ᎧᎸᎬᎢ", "ᏂᏚᏳᎪᏛᎢ", "ᏧᎵᏍᎨᏓ"],
        },
        #[cfg(feature = "cs")]
        crate::Annotation {
            lang: "cs",
            tts: Some("šipka doprava"),
            keywords: &[
                "hlavní",
                "směr",
                "strany",
                "světové",
                "východ",
                "šipka",
                "šipka doprava",
            ],
        },
        #[cfg(feature = "cy")]
        crate::Annotation {
            lang: "cy",
            tts: Some("saeth i’r dde"),
            keywords: &["Dwyrain", "cyfeiriad", "saeth", "saeth i’r dde"],
        },
        #[cfg(feature = "da")]
        crate::Annotation {
            lang: "da",
            tts: Some("pil mod højre"),
            keywords: &["pil", "pil mod højre", "retning", "verdenshjørne", "øst"],
        },
        #[cfg(feature = "de")]
        crate::Annotation {
            lang: "de",
            tts: Some("Pfeil nach rechts"),
            keywords: &[
                "Osten",
                "Pfeil",
                "Pfeil nach rechts",
                "Rechtspfeil",
                "nach rechts",
            ],
        },
        #[cfg(feature = "el")]
        crate::Annotation {
            lang: "el",
            tts: Some("δεξιό βέλος"),
            keywords: &["ανατολικά", "απόλυτη", "βέλος", "δεξιό βέλος", "κατεύθυνση"],
        },
        #[cfg(feature = "en")]
        crate::Annotation {
            lang: "en",
            tts: Some("right arrow"),
            keywords: &["arrow", "cardinal", "direction", "east", "right arrow"],
        },
        #[cfg(feature = "en_AU")]
        crate::Annotation {
            lang: "en_AU",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_CA")]
        crate::Annotation {
            lang: "en_CA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_GB")]
        crate::Annotation {
            lang: "en_GB",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_IN")]
        crate::Annotation {
            lang: "en_IN",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es")]
        crate::Annotation {
            lang: "es",
            tts: Some("flecha hacia la derecha"),
            keywords: &[
                "derecha",
                "dirección",
                "este",
                "flecha",
                "flecha hacia la derecha",
            ],
        },
        #[cfg(feature = "es_419")]
        crate::Annotation {
            lang: "es_419",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_MX")]
        crate::Annotation {
            lang: "es_MX",
            tts: Some("↑↑↑"),
            keywords: &[
                "derecha",
                "dirección",
                "este",
                "flecha",
                "flecha hacia el este",
                "flecha hacia la derecha",
            ],
        },
        #[cfg(feature = "es_US")]
        crate::Annotation {
            lang: "es_US",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "et")]
        crate::Annotation {
            lang: "et",
            tts: Some("nool paremale"),
            keywords: &["ida", "nool", "nool paremale", "suund"],
        },
        #[cfg(feature = "eu")]
        crate::Annotation {
            lang: "eu",
            tts: Some("eskuinera gezia"),
            keywords: &["ekialde", "eskuinera gezia", "gezi", "kardinal", "norabide"],
        },
        #[cfg(feature = "fa")]
        crate::Annotation {
            lang: "fa",
            tts: Some("پیکان راست"),
            keywords: &["جهت اصلی", "شرق", "مسیر", "پیکان", "پیکان راست"],
        },
        #[cfg(feature = "fi")]
        crate::Annotation {
            lang: "fi",
            tts: Some("nuoli oikealle"),
            keywords: &[
                "ilmansuunta",
                "itä",
                "nuoli",
                "nuoli oikealle",
                "pääilmansuunta",
            ],
        },
        #[cfg(feature = "fil")]
        crate::Annotation {
            lang: "fil",
            tts: Some("pakanang arrow"),
            keywords: &[
                "arrow",
                "cardinal",
                "direksyon",
                "pakanan",
                "pakanang arrow",
                "silangan",
            ],
        },
        #[cfg(feature = "fo")]
        crate::Annotation {
            lang: "fo",
            tts: Some("pílur sum peikar til høgru"),
            keywords: &[
                "eystur",
                "k´ós",
                "pílur",
                "pílur sum peikar til høgru",
                "ætt",
            ],
        },
        #[cfg(feature = "fr")]
        crate::Annotation {
            lang: "fr",
            tts: Some("flèche droite"),
            keywords: &["direction", "est", "flèche", "flèche droite"],
        },
        #[cfg(feature = "fr_CA")]
        crate::Annotation {
            lang: "fr_CA",
            tts: Some("flèche pointant vers la droite"),
            keywords: &[
                "direction",
                "est",
                "flèche",
                "flèche pointant vers la droite",
                "point cardinal",
                "à droite",
            ],
        },
        #[cfg(feature = "ga")]
        crate::Annotation {
            lang: "ga",
            tts: Some("saighead ar dheis"),
            keywords: &["príomhaird", "saighead", "saighead ar dheis", "soir"],
        },
        #[cfg(feature = "gd")]
        crate::Annotation {
            lang: "gd",
            tts: Some("saighead gun ear"),
            keywords: &["saighead gun ear", "saighead gun ear-dheas"],
        },
        #[cfg(feature = "gl")]
        crate::Annotation {
            lang: "gl",
            tts: Some("frecha cara á dereita"),
            keywords: &["dereita", "dirección", "frecha cara á dereita", "leste"],
        },
        #[cfg(feature = "gu")]
        crate::Annotation {
            lang: "gu",
            tts: Some("જમણ\u{ac1}\u{a82} તીર"),
            keywords: &["જમણ\u{ac1}\u{a82} તીર", "તીર", "દિશા", "પ\u{ac2}ર\u{acd}વ"],
        },
        #[cfg(feature = "ha")]
        crate::Annotation {
            lang: "ha",
            tts: Some("kibiyar dama"),
            keywords: &["gabas", "kibiya", "kibiyar dama", "shiyya", "tsinin shiyya"],
        },
        #[cfg(feature = "he")]
        crate::Annotation {
            lang: "he",
            tts: Some("חץ ימינה"),
            keywords: &["חץ", "ימינה", "כיוון", "מזרח"],
        },
        #[cfg(feature = "hi")]
        crate::Annotation {
            lang: "hi",
            tts: Some("दाया\u{901} तीर"),
            keywords: &[
                "कार\u{94d}डिनल",
                "तीर",
                "दाया\u{901} तीर",
                "दिशा",
                "प\u{942}र\u{94d}व दिशा",
            ],
        },
        #[cfg(feature = "hr")]
        crate::Annotation {
            lang: "hr",
            tts: Some("strelica prema desno"),
            keywords: &["istok", "smjer", "strelica", "strelica prema desno"],
        },
        #[cfg(feature = "hu")]
        crate::Annotation {
            lang: "hu",
            tts: Some("jobbra mutató nyíl"),
            keywords: &["irány", "jobbra", "jobbra mutató nyíl", "kelet", "nyíl"],
        },
        #[cfg(feature = "hy")]
        crate::Annotation {
            lang: "hy",
            tts: Some("աջ սլաք"),
            keywords: &["աջ սլաք", "արևելք", "գլխավոր", "ուղղություն", "սլաք"],
        },
        #[cfg(feature = "id")]
        crate::Annotation {
            lang: "id",
            tts: Some("tanda panah kanan"),
            keywords: &["arah", "kardinal", "panah", "tanda panah kanan", "timur"],
        },
        #[cfg(feature = "ig")]
        crate::Annotation {
            lang: "ig",
            tts: Some("ube akanri"),
            keywords: &["kadịnal", "nduzị", "ube", "ube akanri", "ọwụwa anyanwụ"],
        },
        #[cfg(feature = "is")]
        crate::Annotation {
            lang: "is",
            tts: Some("ör til hægri"),
            keywords: &["austur", "höfuðátt", "átt", "ör", "ör til hægri"],
        },
        #[cfg(feature = "it")]
        crate::Annotation {
            lang: "it",
            tts: Some("freccia rivolta verso destra"),
            keywords: &[
                "direzione",
                "est",
                "freccia",
                "freccia a destra",
                "freccia rivolta verso destra",
                "punto cardinale",
            ],
        },
        #[cfg(feature = "ja")]
        crate::Annotation {
            lang: "ja",
            tts: Some("右矢印"),
            keywords: &["右", "右矢印", "東", "矢印"],
        },
        #[cfg(feature = "jv")]
        crate::Annotation {
            lang: "jv",
            tts: Some("panah nengen"),
            keywords: &["arah", "kardinal", "panah", "panah nengen", "wetan"],
        },
        #[cfg(feature = "ka")]
        crate::Annotation {
            lang: "ka",
            tts: Some("ისარი მარჯვნივ"),
            keywords: &[
                "აღმოსავლეთი",
                "ისარი",
                "ისარი მარჯვნივ",
                "კარდინალური",
                "მიმართულება",
            ],
        },
        #[cfg(feature = "kab")]
        crate::Annotation {
            lang: "kab",
            tts: Some("aneccab ayeffus"),
            keywords: &["aneccab ayeffus"],
        },
        #[cfg(feature = "kk")]
        crate::Annotation {
            lang: "kk",
            tts: Some("оң жақ бағытты көрсеткі"),
            keywords: &[
                "бағыт",
                "көрсеткі",
                "негізгі",
                "оң жақ бағытты көрсеткі",
                "шығыс",
            ],
        },
        #[cfg(feature = "kl")]
        crate::Annotation {
            lang: "kl",
            tts: Some("pil mod højre"),
            keywords: &["pil", "pil mod højre", "retning", "verdenshjørne", "øst"],
        },
        #[cfg(feature = "km")]
        crate::Annotation {
            lang: "km",
            tts: Some("ព\u{17d2}រ\u{17bd}ញទៅស\u{17d2}តា\u{17c6}"),
            keywords: &[
                "កើត",
                "ទ\u{17b7}ស",
                "ព\u{17d2}រ\u{17bd}ញ",
                "ព\u{17d2}រ\u{17bd}ញទៅស\u{17d2}តា\u{17c6}",
                "ស\u{17d2}តា\u{17c6}",
            ],
        },
        #[cfg(feature = "kn")]
        crate::Annotation {
            lang: "kn",
            tts: Some("ಬಲಭಾಗದ ಬಾಣ"),
            keywords: &[
                "ಕಾರ\u{ccd}ಡ\u{cbf}ನಲ\u{ccd}\u{200c}",
                "ದ\u{cbf}ಕ\u{ccd}ಕು",
                "ಪ\u{cc2}ರ\u{ccd}ವ",
                "ಬಲಭಾಗದ ಬಾಣ",
                "ಬಾಣ",
            ],
        },
        #[cfg(feature = "ko")]
        crate::Annotation {
            lang: "ko",
            tts: Some("우향 화살표"),
            keywords: &["동쪽", "우향 화살표", "화살표"],
        },
        #[cfg(feature = "kok")]
        crate::Annotation {
            lang: "kok",
            tts: Some("उजवो बाण"),
            keywords: &[
                "उजवो बाण",
                "उद\u{947}\u{902}त",
                "दिशा",
                "बाण",
                "म\u{941}ख\u{947}ल दिशा",
            ],
        },
        #[cfg(feature = "ky")]
        crate::Annotation {
            lang: "ky",
            tts: Some("оң жакты караган жебе"),
            keywords: &["багыт", "жебе", "оң жакты караган жебе", "чыгыш"],
        },
        #[cfg(feature = "lb")]
        crate::Annotation {
            lang: "lb",
            tts: Some("Feil no riets"),
            keywords: &[
                "Feil",
                "Feil no riets",
                "Himmelsrichtung",
                "Osten",
                "Richtung",
            ],
        },
        #[cfg(feature = "lo")]
        crate::Annotation {
            lang: "lo",
            tts: Some("ລ\u{eb9}ກສອນຂວາ"),
            keywords: &["ຂວາ", "ທ\u{eb4}ດທາງ", "ລ\u{eb9}ກສອນ", "ລ\u{eb9}ກສອນຂວາ"],
        },
        #[cfg(feature = "lt")]
        crate::Annotation {
            lang: "lt",
            tts: Some("rodyklė į dešinę"),
            keywords: &[
                "koordinatė",
                "kryptis",
                "rodyklė",
                "rodyklė į dešinę",
                "rytai",
            ],
        },
        #[cfg(feature = "lv")]
        crate::Annotation {
            lang: "lv",
            tts: Some("labā bultiņa"),
            keywords: &[
                "austrumi",
                "bultiņa",
                "labā bultiņa",
                "norāde",
                "pa labi vērsta bultiņa",
            ],
        },
        #[cfg(feature = "mi")]
        crate::Annotation {
            lang: "mi",
            tts: Some("pere matau"),
            keywords: &["ahunga", "matua", "pere", "pere matau", "rāwhiti"],
        },
        #[cfg(feature = "mk")]
        crate::Annotation {
            lang: "mk",
            tts: Some("стрелка надесно"),
            keywords: &["десно", "исток", "стрелка", "стрелка надесно"],
        },
        #[cfg(feature = "ml")]
        crate::Annotation {
            lang: "ml",
            tts: Some(
                "വലത\u{d4d}തേക\u{d4d}ക\u{d4d} ച\u{d42}ണ\u{d4d}ട\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
            ),
            keywords: &[
                "അമ\u{d4d}പടയ\u{d3e}ളം",
                "ക\u{d3e}ർഡിനൽ",
                "കിഴക\u{d4d}ക\u{d4d}",
                "ദിശ",
                "വലത\u{d4d}തേക\u{d4d}ക\u{d4d} ച\u{d42}ണ\u{d4d}ട\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
            ],
        },
        #[cfg(feature = "mn")]
        crate::Annotation {
            lang: "mn",
            tts: Some("баруун сум"),
            keywords: &["баруун сум", "зүүн", "кардинал", "сум", "чиглэл"],
        },
        #[cfg(feature = "mr")]
        crate::Annotation {
            lang: "mr",
            tts: Some("उजवा बाण"),
            keywords: &[
                "उजवा बाण",
                "दिशा",
                "प\u{942}र\u{94d}व",
                "बाण",
                "महत\u{94d}वाच\u{947}",
            ],
        },
        #[cfg(feature = "ms")]
        crate::Annotation {
            lang: "ms",
            tts: Some("anak panah ke kanan"),
            keywords: &[
                "anak panah",
                "anak panah ke kanan",
                "arah",
                "kardinal",
                "timur",
            ],
        },
        #[cfg(feature = "mt")]
        crate::Annotation {
            lang: "mt",
            tts: Some("vleġġa leminija"),
            keywords: &[
                "direzzjoni",
                "il-lvant",
                "kardinal",
                "vleġġa",
                "vleġġa leminija",
            ],
        },
        #[cfg(feature = "my")]
        crate::Annotation {
            lang: "my",
            tts: Some("ညာည\u{103d}\u{103e}န\u{103a}မြား"),
            keywords: &[
                "ညာည\u{103d}\u{103e}န\u{103a}မြား",
                "မြား",
                "လမ\u{103a}းည\u{103d}\u{103e}န\u{103a}",
                "အရပ\u{103a}မျက\u{103a}န\u{103e}ာပြ အမ\u{103e}တ\u{103a}အသား",
                "အရ\u{103e}ေ\u{1037}",
            ],
        },
        #[cfg(feature = "nb")]
        crate::Annotation {
            lang: "nb",
            tts: Some("pil høyre"),
            keywords: &["høyrepil", "pil", "pil høyre", "retning", "øst"],
        },
        #[cfg(feature = "ne")]
        crate::Annotation {
            lang: "ne",
            tts: Some("दाया\u{901} तर\u{94d}फको तीर"),
            keywords: &[
                "कार\u{94d}डिनल",
                "तीर",
                "दाया\u{901} तर\u{94d}फको तीर",
                "दिशा",
                "प\u{942}र\u{94d}व",
            ],
        },
        #[cfg(feature = "nl")]
        crate::Annotation {
            lang: "nl",
            tts: Some("pijl naar rechts"),
            keywords: &[
                "oost",
                "pijl",
                "pijl naar rechts",
                "richting",
                "windrichting",
            ],
        },
        #[cfg(feature = "nn")]
        crate::Annotation {
            lang: "nn",
            tts: Some("pil høgre"),
            keywords: &["aust", "høgrepil", "pil", "pil høgre", "retning"],
        },
        #[cfg(feature = "or")]
        crate::Annotation {
            lang: "or",
            tts: Some("ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର"),
            keywords: &[
                "ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର",
                "ତୀର",
                "ଦ\u{b3f}ଗ",
                "ପ\u{b42}ର\u{b4d}ବ",
                "ପ\u{b4d}ରଧ\u{b3e}ନ",
            ],
        },
        #[cfg(feature = "pa")]
        crate::Annotation {
            lang: "pa",
            tts: Some("ਸ\u{a71}ਜ\u{a47} ਤੀਰ"),
            keywords: &[
                "ਕਾਰਡੀਨਲ",
                "ਤੀਰ",
                "ਦਿਸ\u{a3c}ਾ",
                "ਪ\u{a42}ਰਬ",
                "ਸ\u{a71}ਜ\u{a47} ਤੀਰ",
            ],
        },
        #[cfg(feature = "pa_Arab")]
        crate::Annotation {
            lang: "pa_Arab",
            tts: Some("سجا تیر"),
            keywords: &["اہم", "تیر", "سجا تیر", "سمت", "مشرق"],
        },
        #[cfg(feature = "pcm")]
        crate::Annotation {
            lang: "pcm",
            tts: Some("Áro De Pọínt Rait"),
            keywords: &[
                "Dairẹ\u{301}kshọn",
                "Ist",
                "Kádínal",
                "Áro",
                "Áro De Pọínt Rait",
            ],
        },
        #[cfg(feature = "pl")]
        crate::Annotation {
            lang: "pl",
            tts: Some("strzałka w prawo"),
            keywords: &["kierunek", "strzałka", "strzałka w prawo", "wschód"],
        },
        #[cfg(feature = "ps")]
        crate::Annotation {
            lang: "ps",
            tts: Some("ښي غشی"),
            keywords: &["اساسي", "اړخ", "ختيځ", "غشی", "ښي غشی"],
        },
        #[cfg(feature = "pt")]
        crate::Annotation {
            lang: "pt",
            tts: Some("seta para a direita"),
            keywords: &["direita", "direção", "leste", "seta", "seta para a direita"],
        },
        #[cfg(feature = "pt_PT")]
        crate::Annotation {
            lang: "pt_PT",
            tts: Some("↑↑↑"),
            keywords: &["cardeal", "direção", "este", "seta", "seta para a direita"],
        },
        #[cfg(feature = "qu")]
        crate::Annotation {
            lang: "qu",
            tts: Some("paña wach’i"),
            keywords: &["paña wach’i"],
        },
        #[cfg(feature = "ro")]
        crate::Annotation {
            lang: "ro",
            tts: Some("săgeată orientată în dreapta"),
            keywords: &[
                "cardinal",
                "direcție",
                "est",
                "săgeată",
                "săgeată orientată în dreapta",
            ],
        },
        // "root" locale: carries an internal short identifier (e.g. "E10-151"),
        // not a human-readable name — presumably a codegen catalogue id; confirm
        // against the generator before surfacing it to users.
        #[cfg(feature = "root")]
        crate::Annotation {
            lang: "root",
            tts: Some("E10-151"),
            keywords: &["E10-151"],
        },
        #[cfg(feature = "ru")]
        crate::Annotation {
            lang: "ru",
            tts: Some("стрелка вправо"),
            keywords: &["восток", "вправо", "направление", "стрелка"],
        },
        #[cfg(feature = "rw")]
        crate::Annotation {
            lang: "rw",
            tts: Some("akambi k’iburyo"),
            keywords: &[
                "akambi k’iburyo",
                "iburasirazuba",
                "icyerekezo",
                "kimwe mu byerekezo bine",
                "umwambi",
            ],
        },
        #[cfg(feature = "sd")]
        crate::Annotation {
            lang: "sd",
            tts: Some("ساڄي تير"),
            keywords: &["اوڀر", "تير", "ساڄي تير", "طرف", "ڪارڊينل"],
        },
        #[cfg(feature = "si")]
        crate::Annotation {
            lang: "si",
            tts: Some("දක\u{dd4}ණ\u{dd4} ඊතලය"),
            keywords: &[
                "ඊතලය",
                "ක\u{dcf}\u{dcf}ඩ\u{dd2}නල\u{dca}",
                "දක\u{dd4}ණ\u{dd4} ඊතලය",
                "ද\u{dd2}ශ\u{dcf}ව",
                "නැගෙනහ\u{dd2}ර",
            ],
        },
        #[cfg(feature = "sk")]
        crate::Annotation {
            lang: "sk",
            tts: Some("šípka doprava"),
            keywords: &["doprava", "východ", "šípka"],
        },
        #[cfg(feature = "sl")]
        crate::Annotation {
            lang: "sl",
            tts: Some("puščica desno"),
            keywords: &["glavno", "puščica", "puščica desno", "smer", "vzhod"],
        },
        #[cfg(feature = "so")]
        crate::Annotation {
            lang: "so",
            tts: Some("fallarta midig"),
            keywords: &[
                "afarta jiho",
                "bari",
                "fallaar",
                "fallaarta midig",
                "fallarta midig",
                "jiho",
            ],
        },
        #[cfg(feature = "sq")]
        crate::Annotation {
            lang: "sq",
            tts: Some("shigjetë djathtas"),
            keywords: &[
                "drejtim",
                "kryesore",
                "lindje",
                "shigjetë",
                "shigjetë djathtas",
            ],
        },
        #[cfg(feature = "sr")]
        crate::Annotation {
            lang: "sr",
            tts: Some("стрелица надесно"),
            keywords: &["истoк", "смер", "стрeлицa", "стрелица надесно"],
        },
        #[cfg(feature = "sr_Cyrl_BA")]
        crate::Annotation {
            lang: "sr_Cyrl_BA",
            tts: Some("↑↑↑"),
            keywords: &["стрелица надесно"],
        },
        #[cfg(feature = "sr_Latn")]
        crate::Annotation {
            lang: "sr_Latn",
            tts: Some("strelica nadesno"),
            keywords: &["istok", "smer", "strelica", "strelica nadesno"],
        },
        #[cfg(feature = "sr_Latn_BA")]
        crate::Annotation {
            lang: "sr_Latn_BA",
            tts: None,
            keywords: &["strelica nadesno"],
        },
        #[cfg(feature = "sv")]
        crate::Annotation {
            lang: "sv",
            tts: Some("högerpil"),
            keywords: &["högerpil", "pil", "riktning", "väderstreck", "öster"],
        },
        #[cfg(feature = "sw")]
        crate::Annotation {
            lang: "sw",
            tts: Some("mshale unaoelekeza kulia"),
            keywords: &[
                "mashariki",
                "mshale",
                "mshale unaoelekeza kulia",
                "sehemu kuu ya dira",
                "uelekeo",
            ],
        },
        #[cfg(feature = "sw_KE")]
        crate::Annotation {
            lang: "sw_KE",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "ta")]
        crate::Annotation {
            lang: "ta",
            tts: Some("வலது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
            keywords: &[
                "அம\u{bcd}புக\u{bcd}குறி",
                "க\u{bbe}ர\u{bcd}டினல\u{bcd}",
                "கிழக\u{bcd}கு",
                "திசை",
                "வலது",
                "வலது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
            ],
        },
        #[cfg(feature = "te")]
        crate::Annotation {
            lang: "te",
            tts: Some("కుడ\u{c3f} బ\u{c3e}ణం"),
            keywords: &[
                "క\u{c3e}ర\u{c4d}డ\u{c3f}నల\u{c4d}",
                "కుడ\u{c3f} బ\u{c3e}ణం",
                "తూర\u{c4d}పు",
                "ద\u{c3f}శ",
                "బ\u{c3e}ణం",
            ],
        },
        #[cfg(feature = "tg")]
        crate::Annotation {
            lang: "tg",
            tts: Some("тири рост"),
            keywords: &["асосӣ", "самт", "тир", "тири рост", "шарқ"],
        },
        #[cfg(feature = "th")]
        crate::Annotation {
            lang: "th",
            tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ไปทางขวา"),
            keywords: &[
                "ตะว\u{e31}นออก",
                "ท\u{e34}ศทาง",
                "ล\u{e39}กศร",
                "ล\u{e39}กศรช\u{e35}\u{e49}ไปทางขวา",
            ],
        },
        #[cfg(feature = "ti")]
        crate::Annotation {
            lang: "ti",
            tts: Some("የማናይ ምልክት"),
            keywords: &["ምልክት", "ምብራቅ", "ኣንፈት", "ካርዲናል", "የማናይ ምልክት"],
        },
        #[cfg(feature = "tk")]
        crate::Annotation {
            lang: "tk",
            tts: Some("saga ok"),
            keywords: &["göni", "gündogar", "ok", "saga ok", "ugur"],
        },
        #[cfg(feature = "to")]
        crate::Annotation {
            lang: "to",
            tts: Some("ngahau ki toʻomataʻu"),
            keywords: &["mataʻu", "ngahau", "ngahau ki toʻomataʻu"],
        },
        #[cfg(feature = "tr")]
        crate::Annotation {
            lang: "tr",
            tts: Some("sağ ok"),
            keywords: &["ana yön", "doğu", "ok", "sağ ok", "yön"],
        },
        #[cfg(feature = "ug")]
        crate::Annotation {
            lang: "ug",
            tts: Some("ئوڭ كۆرسەتكۈچ"),
            keywords: &["ئاساس", "ئوڭ كۆرسەتكۈچ", "شەرق", "كۆرسەتكۈچ", "يۆنىلىش"],
        },
        #[cfg(feature = "uk")]
        crate::Annotation {
            lang: "uk",
            tts: Some("стрілка вправо"),
            keywords: &[
                "напрям",
                "сторона",
                "стрілка",
                "стрілка вправо",
                "стрілка праворуч",
                "східний",
            ],
        },
        #[cfg(feature = "ur")]
        crate::Annotation {
            lang: "ur",
            tts: Some("دائیں تیر"),
            keywords: &["تیر", "دائیں تیر", "سمت", "مشرق", "کارڈینل"],
        },
        #[cfg(feature = "uz")]
        crate::Annotation {
            lang: "uz",
            tts: Some("oʻngga strelka"),
            keywords: &[
                "oʻngga strelka",
                "o‘ngga strelka",
                "sharq",
                "strelka",
                "yo‘nalish",
            ],
        },
        #[cfg(feature = "vi")]
        crate::Annotation {
            lang: "vi",
            tts: Some("mũi tên phải"),
            keywords: &["chính", "hướng", "mũi tên", "mũi tên phải", "đông"],
        },
        #[cfg(feature = "wo")]
        crate::Annotation {
            lang: "wo",
            tts: Some("fettu ndijoor"),
            keywords: &["fett", "fettu ndijoor", "jubluwaay", "kàrdinal", "penku"],
        },
        #[cfg(feature = "xh")]
        crate::Annotation {
            lang: "xh",
            tts: Some("utolo olusekunene"),
            keywords: &[
                "empuma",
                "ukhardinale",
                "ulwalathiso",
                "utolo",
                "utolo olusekunene",
            ],
        },
        #[cfg(feature = "yo")]
        crate::Annotation {
            lang: "yo",
            tts: Some("àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún"),
            keywords: &[
                "ilà oòrùn",
                "kádínàlì",
                "àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
                "àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún",
                "ìtọ\u{301}sọ\u{301}nà",
            ],
        },
        #[cfg(feature = "yue")]
        crate::Annotation {
            lang: "yue",
            tts: Some("向右箭咀"),
            keywords: &["向右箭咀", "基點", "方向", "東", "箭咀"],
        },
        #[cfg(feature = "yue_Hans")]
        crate::Annotation {
            lang: "yue_Hans",
            tts: Some("向右箭咀"),
            keywords: &["东", "向右箭咀", "基点", "方向", "箭咀"],
        },
        #[cfg(feature = "zh")]
        crate::Annotation {
            lang: "zh",
            tts: Some("向右箭头"),
            keywords: &["东", "向右箭头", "方向", "标识"],
        },
        #[cfg(feature = "zh_Hant")]
        crate::Annotation {
            lang: "zh_Hant",
            tts: Some("向右箭頭"),
            keywords: &["向右箭頭", "方向"],
        },
        #[cfg(feature = "zh_Hant_HK")]
        crate::Annotation {
            lang: "zh_Hant_HK",
            tts: Some("右箭嘴"),
            keywords: &["右箭嘴", "向右箭頭", "方向"],
        },
        #[cfg(feature = "zu")]
        crate::Annotation {
            lang: "zu",
            tts: Some("Inkomba kwesokudla"),
            keywords: &[
                "Inkomba kwesokudla",
                "empumalanga",
                "inkomba",
                "umcibisholo",
            ],
        },
    ],
};
// Auto-generated CLDR-style metadata record for the "down-right arrow" emoji
// (U+2198 + U+FE0F variation selector). Do not hand-edit the data payload:
// each `annotations` entry carries a per-locale text-to-speech name plus
// search keywords, gated behind a matching `feature = "<locale>"` flag so
// that locales not enabled by the consumer compile out entirely.
// NOTE(review): several locales (en_AU, en_CA, es_419, sw_KE, …) carry the
// literal string "↑↑↑" — this looks like CLDR's "inherit from parent locale"
// placeholder rather than a real annotation; confirm against the generator
// before treating it as displayable text.
#[doc = "↘\u{fe0f}"]
pub const DOWN_RIGHT_ARROW: crate::Emoji = crate::Emoji {
    glyph: "↘\u{fe0f}",
    codepoint: "2198 FE0F",
    status: crate::Status::FullyQualified,
    introduction_version: 0.6f32,
    name: "down-right arrow",
    group: "Symbols",
    subgroup: "arrow",
    is_variant: false,
    // Unqualified fallback form (bare U+2198 without the variation selector).
    variants: &[crate::Emoji {
        glyph: "↘",
        codepoint: "2198",
        status: crate::Status::Unqualified,
        introduction_version: 0.6f32,
        name: "down-right arrow",
        group: "Symbols",
        subgroup: "arrow",
        is_variant: true,
        variants: &[],
        annotations: &[],
    }],
    // Per-locale annotations, one entry per enabled locale feature.
    annotations: &[
        #[cfg(feature = "af")]
        crate::Annotation {
            lang: "af",
            tts: Some("pyl na regs onder"),
            keywords: &["pyl", "pyl na regs onder", "rigting", "suidoos"],
        },
        #[cfg(feature = "am")]
        crate::Annotation {
            lang: "am",
            tts: Some("ወደ ታች ቀኝ ጠቋሚ ቀስት"),
            keywords: &["ቀስት", "አቅጣጫ", "ኢንተርካርዲናል", "ወደ ታች ቀኝ ጠቋሚ ቀስት", "ደቡብ ምሥራቅ"],
        },
        #[cfg(feature = "ar")]
        crate::Annotation {
            lang: "ar",
            tts: Some("سهم لأسفل اليمين"),
            keywords: &["اتجاه", "جنوب شرق", "سهم", "سهم لأسفل اليمين"],
        },
        #[cfg(feature = "as")]
        crate::Annotation {
            lang: "as",
            tts: Some("অধোগ\u{9be}মী সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}"),
            keywords: &[
                "অধোগ\u{9be}মী সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
                "আন\u{9cd}তঃদিশ",
                "ক\u{9be}\u{981}ড\u{9bc}",
                "দক\u{9cd}ষিণ-প\u{9c2}ব",
                "দিশ",
            ],
        },
        #[cfg(feature = "az")]
        crate::Annotation {
            lang: "az",
            tts: Some("üzüaşağı sağa yönəlmiş ox"),
            keywords: &[
                "cənub şərq",
                "interkardinal",
                "istiqamət",
                "ox",
                "üzüaşağı sağa yönəlmiş ox",
            ],
        },
        #[cfg(feature = "be")]
        crate::Annotation {
            lang: "be",
            tts: Some("стрэлка ўніз-управа"),
            keywords: &[
                "кірунак",
                "паўднёвы ўсход",
                "стрэлка",
                "стрэлка ўніз-управа",
                "уніз-управа",
            ],
        },
        #[cfg(feature = "bg")]
        crate::Annotation {
            lang: "bg",
            tts: Some("стрелка надолу и надясно"),
            keywords: &["посока", "стрелка", "стрелка надолу и надясно", "югоизток"],
        },
        #[cfg(feature = "bn")]
        crate::Annotation {
            lang: "bn",
            tts: Some("নীচে ড\u{9be}নে তীর"),
            keywords: &[
                "আন\u{9cd}তঃ দিগনির\u{9cd}ণয\u{9bc}",
                "তীর",
                "দক\u{9cd}ষিণ-প\u{9c2}র\u{9cd}ব",
                "দিক",
                "নীচে ড\u{9be}নে তীর",
            ],
        },
        #[cfg(feature = "bs")]
        crate::Annotation {
            lang: "bs",
            tts: Some("strelica dolje-desno"),
            keywords: &["jugoistok", "smjer", "strelica", "strelica dolje-desno"],
        },
        #[cfg(feature = "ca")]
        crate::Annotation {
            lang: "ca",
            tts: Some("fletxa cap avall a la dreta"),
            keywords: &[
                "avall a la dreta",
                "direcció",
                "fletxa",
                "fletxa cap avall a la dreta",
                "sud-est",
            ],
        },
        #[cfg(feature = "chr")]
        crate::Annotation {
            lang: "chr",
            tts: Some("ᎡᎳᏗ-ᎠᎦᏘᏏ ᎦᏝᏗ"),
            keywords: &[
                "ᎡᎳᏗ-ᎠᎦᏘᏏ ᎦᏝᏗ",
                "ᎤᎦᏅᏮᎧᎸᎬᎢ",
                "ᎦᏝᏗ",
                "ᏂᏚᏳᎪᏛᎢ",
                "ᏅᎩᏂᏚᏳᎪᏛᎢ ᏂᏚᏓᎸᏗᏎᎯᎯ",
            ],
        },
        #[cfg(feature = "cs")]
        crate::Annotation {
            lang: "cs",
            tts: Some("šipka doprava dolů"),
            keywords: &[
                "jihovýchod",
                "směr",
                "strany",
                "světové",
                "vedlejší",
                "šipka",
                "šipka doprava dolů",
            ],
        },
        #[cfg(feature = "cy")]
        crate::Annotation {
            lang: "cy",
            tts: Some("saeth i lawr-dde"),
            keywords: &["De-ddwyrain", "cyfeiriad", "saeth", "saeth i lawr-dde"],
        },
        #[cfg(feature = "da")]
        crate::Annotation {
            lang: "da",
            tts: Some("nedadvendt pil mod højre"),
            keywords: &["nedadvendt pil mod højre", "pil", "retning", "sydøst"],
        },
        #[cfg(feature = "de")]
        crate::Annotation {
            lang: "de",
            tts: Some("Pfeil nach rechts unten"),
            keywords: &[
                "Pfeil",
                "Pfeil nach rechts unten",
                "Südosten",
                "nach rechts unten",
            ],
        },
        #[cfg(feature = "el")]
        crate::Annotation {
            lang: "el",
            tts: Some("κάτω δεξιό βέλος"),
            keywords: &[
                "βέλος",
                "ενδιάμεση",
                "κάτω δεξιό βέλος",
                "κατεύθυνση",
                "νοτιοανατολικά",
            ],
        },
        #[cfg(feature = "en")]
        crate::Annotation {
            lang: "en",
            tts: Some("down-right arrow"),
            keywords: &[
                "arrow",
                "direction",
                "down-right arrow",
                "intercardinal",
                "southeast",
            ],
        },
        #[cfg(feature = "en_AU")]
        crate::Annotation {
            lang: "en_AU",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_CA")]
        crate::Annotation {
            lang: "en_CA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_GB")]
        crate::Annotation {
            lang: "en_GB",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_IN")]
        crate::Annotation {
            lang: "en_IN",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es")]
        crate::Annotation {
            lang: "es",
            tts: Some("flecha hacia la esquina inferior derecha"),
            keywords: &[
                "abajo",
                "derecha",
                "dirección",
                "flecha",
                "flecha hacia la esquina inferior derecha",
                "sudeste",
            ],
        },
        #[cfg(feature = "es_419")]
        crate::Annotation {
            lang: "es_419",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_MX")]
        crate::Annotation {
            lang: "es_MX",
            tts: Some("flecha hacia el sureste"),
            keywords: &["flecha hacia el sureste"],
        },
        #[cfg(feature = "es_US")]
        crate::Annotation {
            lang: "es_US",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "et")]
        crate::Annotation {
            lang: "et",
            tts: Some("nool alla paremale"),
            keywords: &["kagu", "nool", "nool alla paremale", "suund"],
        },
        #[cfg(feature = "eu")]
        crate::Annotation {
            lang: "eu",
            tts: Some("behera eta eskuinera gezia"),
            keywords: &[
                "behera eta eskuinera gezia",
                "gezi",
                "hego-ekialde",
                "kardinal arteko",
                "norabide",
            ],
        },
        #[cfg(feature = "fa")]
        crate::Annotation {
            lang: "fa",
            tts: Some("پیکان پایین راست"),
            keywords: &["جنوب شرقی", "جهت فرعی", "مسیر", "پیکان", "پیکان پایین راست"],
        },
        #[cfg(feature = "fi")]
        crate::Annotation {
            lang: "fi",
            tts: Some("nuoli alaoikealle"),
            keywords: &[
                "ilmansuunta",
                "kaakko",
                "nuoli",
                "nuoli alaoikealle",
                "väli-ilmansuunta",
            ],
        },
        #[cfg(feature = "fil")]
        crate::Annotation {
            lang: "fil",
            tts: Some("pababang pakanan na arrow"),
            keywords: &[
                "arrow",
                "direksyon",
                "intercardinal",
                "pababang pakanan",
                "pababang pakanan na arrow",
                "timog-silangan",
            ],
        },
        #[cfg(feature = "fo")]
        crate::Annotation {
            lang: "fo",
            tts: Some("pílur sum peikar niðureftir og til høgru"),
            keywords: &[
                "k´ós",
                "landsynningur",
                "pílur",
                "pílur sum peikar niðureftir og til høgru",
                "ætt",
            ],
        },
        #[cfg(feature = "fr")]
        crate::Annotation {
            lang: "fr",
            tts: Some("flèche bas droite"),
            keywords: &["direction", "flèche", "flèche bas droite", "sud-est"],
        },
        #[cfg(feature = "fr_CA")]
        crate::Annotation {
            lang: "fr_CA",
            tts: Some("flèche pointant vers le bas à droite"),
            keywords: &[
                "direction",
                "en bas à droite",
                "flèche pointant vers le bas à droite",
                "lèche",
                "point intercardinal",
                "sud-est",
            ],
        },
        #[cfg(feature = "ga")]
        crate::Annotation {
            lang: "ga",
            tts: Some("saighead síos ar dheis"),
            keywords: &[
                "idirmheánach",
                "saighead",
                "saighead síos ar dheis",
                "soir ó dheas",
            ],
        },
        #[cfg(feature = "gd")]
        crate::Annotation {
            lang: "gd",
            tts: Some("saighead gun ear-dheas"),
            keywords: &[
                "combaist",
                "comhair",
                "ear-dheas",
                "saighead",
                "saighead gun ear-dheas",
                "àirde",
            ],
        },
        #[cfg(feature = "gl")]
        crate::Annotation {
            lang: "gl",
            tts: Some("frecha cara abaixo á dereita"),
            keywords: &[
                "abaixo",
                "dirección",
                "frecha",
                "frecha cara abaixo á dereita",
                "sueste",
            ],
        },
        #[cfg(feature = "gu")]
        crate::Annotation {
            lang: "gu",
            tts: Some("નીચ\u{ac7}-જમણ\u{ac1}\u{a82} તીર"),
            keywords: &[
                "તીર",
                "દક\u{acd}ષિણપ\u{ac2}ર\u{acd}વ",
                "દિશા",
                "નીચ\u{ac7}-જમણ\u{ac1}\u{a82} તીર",
            ],
        },
        #[cfg(feature = "ha")]
        crate::Annotation {
            lang: "ha",
            tts: Some("kibiyar ƙasa ta dama"),
            keywords: &[
                "a ƙetaren tsini shiyya",
                "kibiya",
                "kibiyar ƙasa ta dama",
                "kudu maso gabas",
                "shiyya",
            ],
        },
        #[cfg(feature = "he")]
        crate::Annotation {
            lang: "he",
            tts: Some("חץ למטה וימינה"),
            keywords: &[
                "דרום-מזרח",
                "חץ",
                "חץ למטה וימינה",
                "חץ מטה וימינה",
                "ימינה",
                "כיוון",
                "למטה",
            ],
        },
        #[cfg(feature = "hi")]
        crate::Annotation {
            lang: "hi",
            tts: Some("नीच\u{947}-दाया\u{901} तीर"),
            keywords: &[
                "इ\u{902}टरकार\u{94d}डिनल",
                "दक\u{94d}षिण-प\u{942}र\u{94d}व दिशा",
                "नीच\u{947}-दाया\u{901} तीर",
            ],
        },
        #[cfg(feature = "hr")]
        crate::Annotation {
            lang: "hr",
            tts: Some("strelica prema dolje-desno"),
            keywords: &[
                "jugoistok",
                "smjer",
                "strelica",
                "strelica prema dolje-desno",
            ],
        },
        #[cfg(feature = "hu")]
        crate::Annotation {
            lang: "hu",
            tts: Some("jobbra lefelé mutató nyíl"),
            keywords: &["délkelet", "irány", "jobbra lefelé mutató nyíl", "nyíl"],
        },
        #[cfg(feature = "hy")]
        crate::Annotation {
            lang: "hy",
            tts: Some("ներքև աջ սլաք"),
            keywords: &["հարավ-արևելք", "ներքև աջ սլաք", "ուղղություն", "սլաք"],
        },
        #[cfg(feature = "id")]
        crate::Annotation {
            lang: "id",
            tts: Some("tanda panah kanan bawah"),
            keywords: &[
                "arah",
                "interkardinal",
                "panah",
                "tanda panah kanan bawah",
                "tenggara",
            ],
        },
        #[cfg(feature = "ig")]
        crate::Annotation {
            lang: "ig",
            tts: Some("ube-ala akanri"),
            keywords: &[
                "intakadịnal",
                "nduzi",
                "ndịdaọwụwa anyanwụ",
                "ube",
                "ube-ala akanri",
            ],
        },
        #[cfg(feature = "is")]
        crate::Annotation {
            lang: "is",
            tts: Some("ör ská niður til hægri"),
            keywords: &["suðaustur", "átt", "ör", "ör ská niður til hægri"],
        },
        #[cfg(feature = "it")]
        crate::Annotation {
            lang: "it",
            tts: Some("freccia in basso a destra"),
            keywords: &[
                "direzione",
                "freccia",
                "freccia in basso a destra",
                "punto intercardinale",
                "sud-est",
            ],
        },
        #[cfg(feature = "ja")]
        crate::Annotation {
            lang: "ja",
            tts: Some("右下矢印"),
            keywords: &["南東", "右下", "右下矢印", "矢印"],
        },
        #[cfg(feature = "jv")]
        crate::Annotation {
            lang: "jv",
            tts: Some("panah mudhun-nengen"),
            keywords: &[
                "arah",
                "interkardinal",
                "panah",
                "panah mudhun-nengen",
                "wetan-kidul",
            ],
        },
        #[cfg(feature = "ka")]
        crate::Annotation {
            lang: "ka",
            tts: Some("ისარი ქვემოთ და მარჯვნივ"),
            keywords: &[
                "ინტერკარდინალური",
                "ისარი",
                "ისარი ქვემოთ და მარჯვნივ",
                "კარდინალური",
                "მიმართულება",
                "სამხრეთ-აღმოსავლეთი",
            ],
        },
        #[cfg(feature = "kab")]
        crate::Annotation {
            lang: "kab",
            tts: Some("aneccab d akesser uẓẓil"),
            keywords: &["aneccab d akesser uẓẓil"],
        },
        #[cfg(feature = "kk")]
        crate::Annotation {
            lang: "kk",
            tts: Some("төменгі оң жақ көрсеткісі"),
            keywords: &[
                "бағыт",
                "көрсеткі",
                "оңтүстік-шығыс",
                "румба аралық",
                "төменгі оң жақ көрсеткісі",
            ],
        },
        #[cfg(feature = "kl")]
        crate::Annotation {
            lang: "kl",
            tts: Some("nedadvendt pil mod højre"),
            keywords: &["nedadvendt pil mod højre", "pil", "retning", "sydøst"],
        },
        #[cfg(feature = "km")]
        crate::Annotation {
            lang: "km",
            tts: Some(
                "ព\u{17d2}រ\u{17bd}ញទៅក\u{17d2}រោមងាកទៅស\u{17d2}តា\u{17c6}ក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ),
            keywords: &[
                "ទ\u{17b7}ស",
                "ទ\u{17b7}សដៅ",
                "ព\u{17d2}រ\u{17bd}ញ",
                "ព\u{17d2}រ\u{17bd}ញទៅក\u{17d2}រោមងាកទៅស\u{17d2}តា\u{17c6}ក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ],
        },
        #[cfg(feature = "kn")]
        crate::Annotation {
            lang: "kn",
            tts: Some("ಕ\u{cc6}ಳಗ\u{cbf}ನ ಬಲ ಬಾಣ"),
            keywords: &[
                "ಆಗ\u{ccd}ನೇಯ",
                "ಕ\u{cc6}ಳಗ\u{cbf}ನ ಬಲ ಬಾಣ",
                "ದಕ\u{ccd}ಷ\u{cbf}ಣ",
                "ಬಾಣದ ಗುರುತು",
            ],
        },
        #[cfg(feature = "ko")]
        crate::Annotation {
            lang: "ko",
            tts: Some("우하향 화살표"),
            keywords: &["남동쪽", "우하향 화살표", "화살표"],
        },
        #[cfg(feature = "kok")]
        crate::Annotation {
            lang: "kok",
            tts: Some("सकयल-उजवो बाण"),
            keywords: &[
                "आग\u{94d}न\u{947}य",
                "उपदिशा",
                "दिशा",
                "बाण",
                "सकयल-उजवो बाण",
            ],
        },
        #[cfg(feature = "ky")]
        crate::Annotation {
            lang: "ky",
            tts: Some("төмөн оң жакты караган жебе"),
            keywords: &[
                "багыт",
                "жебе",
                "түштүк-чыгыш",
                "төмөн оң жакты караган жебе",
            ],
        },
        #[cfg(feature = "lb")]
        crate::Annotation {
            lang: "lb",
            tts: Some("Feil no ënnen a riets"),
            keywords: &[
                "Feil",
                "Feil no ënnen a riets",
                "Himmelsrichtung",
                "Richtung",
                "Südosten",
            ],
        },
        #[cfg(feature = "lo")]
        crate::Annotation {
            lang: "lo",
            tts: Some("ລ\u{eb9}ກສອນລ\u{ebb}ງມ\u{eb8}ມຂວາ"),
            keywords: &[
                "ທ\u{eb4}ດທາງ",
                "ມ\u{eb8}ມຂວາ",
                "ລ\u{eb9}ກສອນ",
                "ລ\u{eb9}ກສອນລ\u{ebb}ງມ\u{eb8}ມຂວາ",
                "ລ\u{ebb}ງ",
            ],
        },
        #[cfg(feature = "lt")]
        crate::Annotation {
            lang: "lt",
            tts: Some("rodyklė į apačią ir į dešinę"),
            keywords: &[
                "kryptis",
                "pietryčiai",
                "rodyklė",
                "rodyklė į apačią ir į dešinę",
            ],
        },
        #[cfg(feature = "lv")]
        crate::Annotation {
            lang: "lv",
            tts: Some("uz apakšējo labo stūri vērsta bultiņa"),
            keywords: &[
                "bultiņa",
                "dienvidaustrumi",
                "norāde",
                "uz apakšējo labo stūri vērsta bultiņa",
            ],
        },
        #[cfg(feature = "mi")]
        crate::Annotation {
            lang: "mi",
            tts: Some("pere iho-matau"),
            keywords: &[
                "ahunga",
                "pere",
                "pere iho-matau",
                "rāwhiti-mā-tonga",
                "waenga-matua",
            ],
        },
        #[cfg(feature = "mk")]
        crate::Annotation {
            lang: "mk",
            tts: Some("стрелка надолу десно"),
            keywords: &["десно", "стрелка", "стрелка надолу десно", "југоисток"],
        },
        #[cfg(feature = "ml")]
        crate::Annotation {
            lang: "ml",
            tts: Some(
                "ത\u{d3e}ഴോട\u{d4d}ട\u{d4d} വലത\u{d4d}തേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
            ),
            keywords: &[
                "അമ\u{d4d}പടയ\u{d3e}ളം",
                "ഇന\u{d4d}റർക\u{d3e}ർഡിനൽ",
                "ത\u{d3e}ഴോട\u{d4d}ട\u{d4d} വലത\u{d4d}തേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
                "തെക\u{d4d}ക\u{d4d} കിഴക\u{d4d}ക\u{d4d}",
                "ദിശ",
            ],
        },
        #[cfg(feature = "mn")]
        crate::Annotation {
            lang: "mn",
            tts: Some("баруун доошоо сум"),
            keywords: &["баруун доошоо сум", "зүүн өмнөд", "сум", "чиглэл"],
        },
        #[cfg(feature = "mr")]
        crate::Annotation {
            lang: "mr",
            tts: Some("खाली-उजवीकड\u{947} दर\u{94d}शविणारा बाण"),
            keywords: &[
                "अ\u{902}तर\u{94d}गत महत\u{94d}वाच\u{947}",
                "खाली-उजवीकड\u{947} दर\u{94d}शविणारा बाण",
                "दक\u{94d}षिणप\u{942}र\u{94d}व",
                "दिशा",
                "बाण",
            ],
        },
        #[cfg(feature = "ms")]
        crate::Annotation {
            lang: "ms",
            tts: Some("anak panah penjuru bawah kanan"),
            keywords: &[
                "anak panah",
                "anak panah penjuru bawah kanan",
                "antara kardinal",
                "arah",
                "tenggara",
            ],
        },
        #[cfg(feature = "mt")]
        crate::Annotation {
            lang: "mt",
            tts: Some("vleġġa ’l isfel leminija"),
            keywords: &[
                "direzzjoni",
                "interkardinal",
                "vleġġa",
                "vleġġa ’l isfel leminija",
                "xlokk",
            ],
        },
        #[cfg(feature = "my")]
        crate::Annotation {
            lang: "my",
            tts: Some("ညာဘက\u{103a}အောက\u{103a}ည\u{103d}\u{103e}န\u{103a} မြား"),
            keywords: &[
                "ညာဘက\u{103a}အောက\u{103a}ည\u{103d}\u{103e}န\u{103a} မြား",
                "မြား",
                "လမ\u{103a}းည\u{103d}\u{103e}န\u{103a}",
                "အရပ\u{103a}မျက\u{103a}န\u{103e}ာ န\u{103e}စ\u{103a}ခ\u{102f}ကြားပြ အမ\u{103e}တ\u{103a}အသား",
                "အရ\u{103e}ေ\u{1037}တောင\u{103a}",
            ],
        },
        #[cfg(feature = "nb")]
        crate::Annotation {
            lang: "nb",
            tts: Some("pil ned-høyre"),
            keywords: &["pil", "pil ned-høyre", "retning", "sørøst"],
        },
        #[cfg(feature = "ne")]
        crate::Annotation {
            lang: "ne",
            tts: Some("तल दाया\u{901}तर\u{94d}फको तीर"),
            keywords: &[
                "इन\u{94d}टरकार\u{94d}डिनल",
                "तल दाया\u{901}तर\u{94d}फको तीर",
                "तीर",
                "दक\u{94d}षिण प\u{942}र\u{94d}व",
                "दिशा",
            ],
        },
        #[cfg(feature = "nl")]
        crate::Annotation {
            lang: "nl",
            tts: Some("pijl rechtsomlaag"),
            keywords: &[
                "pijl",
                "pijl rechtsomlaag",
                "richting",
                "windrichting",
                "zuidoost",
            ],
        },
        #[cfg(feature = "nn")]
        crate::Annotation {
            lang: "nn",
            tts: Some("pil ned -høgre"),
            keywords: &[
                "pil",
                "pil ned -høgre",
                "pil ned-høgre",
                "retning",
                "søraust",
            ],
        },
        #[cfg(feature = "or")]
        crate::Annotation {
            lang: "or",
            tts: Some("ତଳ-ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର"),
            keywords: &[
                "ଅନ\u{b4d}ତଃପ\u{b4d}ରଧ\u{b3e}ନ",
                "ତଳ-ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର",
                "ତୀର",
                "ଦକ\u{b4d}ଷ\u{b3f}ଣପ\u{b42}ର\u{b4d}ବ",
                "ଦ\u{b3f}ଗ",
            ],
        },
        #[cfg(feature = "pa")]
        crate::Annotation {
            lang: "pa",
            tts: Some("ਥ\u{a71}ਲ\u{a47}-ਸ\u{a71}ਜ\u{a47} ਤੀਰ"),
            keywords: &[
                "ਇ\u{a70}ਟਰਕਾਰਡੀਨਲ",
                "ਤੀਰ",
                "ਥ\u{a71}ਲ\u{a47}-ਸ\u{a71}ਜ\u{a47} ਤੀਰ",
                "ਦਿਸ\u{a3c}ਾ",
                "ਦ\u{a71}ਖਣ-ਪ\u{a42}ਰਬ",
            ],
        },
        #[cfg(feature = "pa_Arab")]
        crate::Annotation {
            lang: "pa_Arab",
            tts: Some("ہیٹھلا-سجا تیر"),
            keywords: &["انٹر کارڈینل", "تیر", "جنوب مشرق", "سمت", "ہیٹھلا-سجا تیر"],
        },
        #[cfg(feature = "pcm")]
        crate::Annotation {
            lang: "pcm",
            tts: Some("Áro De Pọínt Daún-Rait"),
            keywords: &[
                "Dairẹ\u{301}kshọn",
                "Intakádínal",
                "Saútist",
                "Áro",
                "Áro De Pọínt Daún-Rait",
            ],
        },
        #[cfg(feature = "pl")]
        crate::Annotation {
            lang: "pl",
            tts: Some("strzałka w dół w prawo"),
            keywords: &[
                "kierunek",
                "południowy wschód",
                "strzałka",
                "strzałka w dół w prawo",
            ],
        },
        #[cfg(feature = "ps")]
        crate::Annotation {
            lang: "ps",
            tts: Some("ښکته ښي غشی"),
            keywords: &[
                "انټر کارډينل",
                "اړخ",
                "سويلي ختيځ",
                "غشی",
                "ښکته ښي غشی",
                "ښۍ ښکته غشی",
            ],
        },
        #[cfg(feature = "pt")]
        crate::Annotation {
            lang: "pt",
            tts: Some("seta para baixo e para a direita"),
            keywords: &[
                "direção",
                "intercardinal",
                "seta",
                "seta para baixo e para a direita",
                "sudeste",
            ],
        },
        #[cfg(feature = "pt_PT")]
        crate::Annotation {
            lang: "pt_PT",
            tts: Some("seta para a direita e para baixo"),
            keywords: &[
                "direção",
                "intermédio",
                "seta",
                "seta para a direita e para baixo",
                "sudeste",
            ],
        },
        #[cfg(feature = "qu")]
        crate::Annotation {
            lang: "qu",
            tts: Some("uran paña wach’i"),
            keywords: &["uran paña wach’i"],
        },
        #[cfg(feature = "ro")]
        crate::Annotation {
            lang: "ro",
            tts: Some("săgeată orientată în dreapta-jos"),
            keywords: &[
                "direcție",
                "intercardinal",
                "sud-est",
                "săgeată",
                "săgeată orientată în dreapta-jos",
            ],
        },
        #[cfg(feature = "root")]
        crate::Annotation {
            lang: "root",
            tts: Some("E10-010"),
            keywords: &["E10-010"],
        },
        #[cfg(feature = "ru")]
        crate::Annotation {
            lang: "ru",
            tts: Some("стрелка вправо-вниз"),
            keywords: &["вправо-вниз", "направление", "стрелка", "юго-восток"],
        },
        #[cfg(feature = "rw")]
        crate::Annotation {
            lang: "rw",
            tts: Some("umwambi wo hasi ugana iburyo"),
            keywords: &[
                "amajyepfo ashyira iburasirazuba",
                "icyerekezo",
                "kiri hagati y’ibyerekeezo bine",
                "umwambi",
                "umwambi wo hasi ugana iburyo",
            ],
        },
        #[cfg(feature = "sd")]
        crate::Annotation {
            lang: "sd",
            tts: Some("هيٺ ساڄي طرف تير"),
            keywords: &["انٽرڪارڊينل", "تير", "طرف", "هيٺ ساڄي طرف تير", "ڏکڻ اوڀر"],
        },
        #[cfg(feature = "si")]
        crate::Annotation {
            lang: "si",
            tts: Some("යට\u{dd2}-දක\u{dd4}ණ\u{dd4} ඊතලය"),
            keywords: &[
                "ඉන\u{dca}ටර\u{dca}ක\u{dcf}ඩ\u{dd2}නල\u{dca}",
                "ඊතලය",
                "ග\u{dd2}න\u{dd2}කොණ",
                "ද\u{dd2}\u{dd2}ශ\u{dcf}ව",
                "යට\u{dd2}-දක\u{dd4}ණ\u{dd4} ඊතලය",
            ],
        },
        #[cfg(feature = "sk")]
        crate::Annotation {
            lang: "sk",
            tts: Some("šípka doprava nadol"),
            keywords: &[
                "doprava nadol",
                "juhovýchod",
                "šípka",
                "šípka doprava nadol",
            ],
        },
        #[cfg(feature = "sl")]
        crate::Annotation {
            lang: "sl",
            tts: Some("puščica desno dol"),
            keywords: &[
                "jugovzhod",
                "puščica",
                "puščica desno dol",
                "smer",
                "stransko",
            ],
        },
        #[cfg(feature = "so")]
        crate::Annotation {
            lang: "so",
            tts: Some("fallaarta hoose ee midig"),
            keywords: &[
                "fallaar",
                "fallaarta hoose ee midig",
                "isdhaafsiga jihooyinka",
                "jihada",
                "koonfur bari",
            ],
        },
        #[cfg(feature = "sq")]
        crate::Annotation {
            lang: "sq",
            tts: Some("shigjetë poshtë djathtas"),
            keywords: &[
                "drejtim",
                "i ndërmjetëm",
                "juglindje",
                "shigjetë",
                "shigjetë poshtë djathtas",
            ],
        },
        #[cfg(feature = "sr")]
        crate::Annotation {
            lang: "sr",
            tts: Some("стрелица надоле и надесно"),
            keywords: &["jугoистoк", "смер", "стрeлицa", "стрелица надоле и надесно"],
        },
        #[cfg(feature = "sr_Cyrl_BA")]
        crate::Annotation {
            lang: "sr_Cyrl_BA",
            tts: Some("стрелица надоље и надесно"),
            keywords: &["стрелица надоље и надесно"],
        },
        #[cfg(feature = "sr_Latn")]
        crate::Annotation {
            lang: "sr_Latn",
            tts: Some("strelica nadole i nadesno"),
            keywords: &["jugoistok", "smer", "strelica", "strelica nadole i nadesno"],
        },
        #[cfg(feature = "sr_Latn_BA")]
        crate::Annotation {
            lang: "sr_Latn_BA",
            tts: Some("strelica nadolje i nadesno"),
            keywords: &["strelica nadolje i nadesno"],
        },
        #[cfg(feature = "sv")]
        crate::Annotation {
            lang: "sv",
            tts: Some("nedåtpil höger"),
            keywords: &["höger", "nedåt", "nedåtpil höger", "pil"],
        },
        #[cfg(feature = "sw")]
        crate::Annotation {
            lang: "sw",
            tts: Some("mshale unaoelekeza chini kulia"),
            keywords: &[
                "kaskazini kusini mashariki",
                "kati ya sehemu kuu ya dira",
                "mshale",
                "mshale unaoelekeza chini kulia",
                "uelekeo",
            ],
        },
        #[cfg(feature = "sw_KE")]
        crate::Annotation {
            lang: "sw_KE",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "ta")]
        crate::Annotation {
            lang: "ta",
            tts: Some("க\u{bc0}ழ\u{bcd}-வலது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
            keywords: &[
                "க\u{bc0}ழ\u{bcd}-வலது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
                "திசை",
                "தென\u{bcd}கிழக\u{bcd}கு",
            ],
        },
        #[cfg(feature = "te")]
        crate::Annotation {
            lang: "te",
            tts: Some("ద\u{c3f}గువ కుడ\u{c3f} మూల బ\u{c3e}ణం"),
            keywords: &[
                "ఆగ\u{c4d}న\u{c47}యం",
                "ద\u{c3f}గువ కుడ\u{c3f} మూల బ\u{c3e}ణం",
                "ద\u{c3f}శ",
                "బ\u{c3e}ణం",
            ],
        },
        #[cfg(feature = "tg")]
        crate::Annotation {
            lang: "tg",
            tts: Some("тири поёну рост"),
            keywords: &["мобайнӣ", "самт", "тир", "тири поёну рост", "ҷанубу шарқ"],
        },
        #[cfg(feature = "th")]
        crate::Annotation {
            lang: "th",
            tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ม\u{e38}มขวาล\u{e48}าง"),
            keywords: &[
                "ตะว\u{e31}นออกเฉ\u{e35}ยงใต\u{e49}",
                "ท\u{e34}ศทาง",
                "ล\u{e39}กศร",
                "ล\u{e39}กศรช\u{e35}\u{e49}ม\u{e38}มขวาล\u{e48}าง",
            ],
        },
        #[cfg(feature = "ti")]
        crate::Annotation {
            lang: "ti",
            tts: Some("ምልክት ናብ-የማናይ ታሕቲ"),
            keywords: &[
                "ምልክት",
                "ምልክት ናብ-የማናይ ታሕቲ",
                "ኣንፈት",
                "ውሽጣዊ ናይ ካርዲናል",
                "ደቡባዊ ምብራቅ",
            ],
        },
        #[cfg(feature = "tk")]
        crate::Annotation {
            lang: "tk",
            tts: Some("aşak-saga ok"),
            keywords: &["aşak-saga ok", "gytak", "günorta-gündogar", "ok", "ugur"],
        },
        #[cfg(feature = "to")]
        crate::Annotation {
            lang: "to",
            tts: Some("ngahau ki lalo toʻomataʻu"),
            keywords: &["lalo", "mataʻu", "ngahau", "ngahau ki lalo toʻomataʻu"],
        },
        #[cfg(feature = "tr")]
        crate::Annotation {
            lang: "tr",
            tts: Some("sağ aşağı ok"),
            keywords: &["ara yön", "güney doğu", "ok", "sağ aşağı ok", "yön"],
        },
        #[cfg(feature = "ug")]
        crate::Annotation {
            lang: "ug",
            tts: Some("ئاستى ئوڭ كۆرسەتكۈچ"),
            keywords: &[
                "ئارا يۆنىلىشلىك",
                "ئاستى ئوڭ كۆرسەتكۈچ",
                "شەرقىي-جەنۇب",
                "كۆرسەتكۈچ",
                "يۆنىلىش",
            ],
        },
        #[cfg(feature = "uk")]
        crate::Annotation {
            lang: "uk",
            tts: Some("стрілка вниз управо"),
            keywords: &[
                "напрям",
                "проміжний",
                "південно-східний",
                "стрілка",
                "стрілка вниз управо",
                "стрілка вниз і праворуч",
            ],
        },
        #[cfg(feature = "ur")]
        crate::Annotation {
            lang: "ur",
            tts: Some("نیچے دائیں تیر"),
            keywords: &["تیر", "جنوب مشرق", "سائن", "سمت", "نیچے دائیں تیر"],
        },
        #[cfg(feature = "uz")]
        crate::Annotation {
            lang: "uz",
            tts: Some("o‘ng-pastga strelka"),
            keywords: &["chiziq", "janubi-sharq", "o‘ng-pastga strelka", "yo‘nalish"],
        },
        #[cfg(feature = "vi")]
        crate::Annotation {
            lang: "vi",
            tts: Some("mũi tên xuống bên phải"),
            keywords: &[
                "hướng",
                "mũi tên",
                "mũi tên xuống bên phải",
                "nhiều hướng",
                "đông nam",
            ],
        },
        #[cfg(feature = "wo")]
        crate::Annotation {
            lang: "wo",
            tts: Some("suuf-ndijoor fett"),
            keywords: &[
                "direction",
                "fett",
                "interkàrdinal",
                "penku-gànjool",
                "suuf-ndijoor fett",
            ],
        },
        #[cfg(feature = "xh")]
        crate::Annotation {
            lang: "xh",
            tts: Some("utolo olujonge ezantsi"),
            keywords: &[
                "empuma mzantsi",
                "ukhardinale ohlangeneyo",
                "ulwalathiso",
                "utolo",
                "utolo olujonge ezantsi",
            ],
        },
        #[cfg(feature = "yo")]
        crate::Annotation {
            lang: "yo",
            tts: Some("àmì ilẹ\u{300} ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún"),
            keywords: &[
                "gúúsù ilà oòrùn",
                "àmì ilẹ\u{300} ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún",
                "àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
                "ìlọ\u{301}pọ\u{300} kádínàlì",
                "ìtọ\u{301}sọ\u{301}nà",
            ],
        },
        #[cfg(feature = "yue")]
        crate::Annotation {
            lang: "yue",
            tts: Some("向右下箭咀"),
            keywords: &["向右下箭咀", "基點間", "方向", "東南", "箭咀"],
        },
        #[cfg(feature = "yue_Hans")]
        crate::Annotation {
            lang: "yue_Hans",
            tts: Some("向右下箭咀"),
            keywords: &["东南", "向右下箭咀", "基点间", "方向", "箭咀"],
        },
        #[cfg(feature = "zh")]
        crate::Annotation {
            lang: "zh",
            tts: Some("右下箭头"),
            keywords: &["东南", "右下箭头", "方向", "标识"],
        },
        #[cfg(feature = "zh_Hant")]
        crate::Annotation {
            lang: "zh_Hant",
            tts: Some("右下箭頭"),
            keywords: &["右下箭頭", "方向"],
        },
        #[cfg(feature = "zh_Hant_HK")]
        crate::Annotation {
            lang: "zh_Hant_HK",
            tts: Some("下右箭嘴"),
            keywords: &["下右箭嘴", "方向"],
        },
        #[cfg(feature = "zu")]
        crate::Annotation {
            lang: "zu",
            tts: Some("inkombaphansi ngqo"),
            keywords: &[
                "eningizimunempumalanga",
                "inkomba",
                "inkombakuhlanganisa",
                "inkombaphansi ngqo",
                "umcibisholo",
            ],
        },
    ],
};
#[doc = "⬇\u{fe0f}"]
pub const DOWN_ARROW: crate::Emoji = crate::Emoji {
glyph: "⬇\u{fe0f}",
codepoint: "2B07 FE0F",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "down arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[crate::Emoji {
glyph: "⬇",
codepoint: "2B07",
status: crate::Status::Unqualified,
introduction_version: 0.6f32,
name: "down arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: true,
variants: &[],
annotations: &[],
}],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("pyl na onder"),
keywords: &["af", "pyl", "pyl na onder", "rigting", "suid"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("ወደ ታች ጠቋሚ ቀስት"),
keywords: &["ቀስት", "ታች", "አቅጣጫ", "ካርዲናል", "ወደ ታች ጠቋሚ ቀስት", "ደቡብ"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهم لأسفل"),
keywords: &["اتجاه", "جنوب", "سهم", "سهم لأسفل", "كاردينال"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some("অধোম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}"),
keywords: &[
"অধোম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
"ক\u{9be}\u{981}ড\u{9bc}",
"তল",
"দক\u{9cd}ষিণ",
"দিশ",
"ম\u{9c1}খ\u{9cd}য দিশ",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("üzüaşağı ox"),
keywords: &["aşağı", "cənub", "istiqamət", "ox", "üzüaşağı ox", "əsas"],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка ўніз"),
keywords: &["кірунак", "поўдзень", "стрэлка", "стрэлка ўніз", "уніз"],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("Стрелка надолу"),
keywords: &["Стрелка надолу", "надолу", "посока", "стрелка", "юг"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("নিচের দিকের তীর"),
keywords: &[
"তীর",
"দক\u{9cd}ষিণ",
"দিক",
"নিচে",
"নিচের দিকের তীর",
"পরিম\u{9be}ণব\u{9be}চক",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica nadolje"),
keywords: &["jug", "smjer", "strelica", "strelica nadolje"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa cap avall"),
keywords: &["avall", "direcció", "fletxa", "fletxa cap avall", "sud"],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎡᎳᏗ ᎦᏝᏗ"),
keywords: &["ᎡᎳᏗ", "ᎤᎦᏅᏮ", "ᎦᏝᏗ", "ᏂᏚᏳᎪᏛᎢ", "ᏧᎵᏍᎨᏓ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipka dolů"),
keywords: &[
"hlavní",
"jih",
"směr",
"strany",
"světové",
"šipka",
"šipka dolů",
],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth i lawr"),
keywords: &["De", "cyfeiriad", "saeth", "saeth i lawr"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("nedadvendt pil"),
keywords: &[
"ned",
"nedadvendt pil",
"pil",
"retning",
"syd",
"verdenshjørne",
],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("Pfeil nach unten"),
keywords: &[
"Abwärtspfeil",
"Pfeil",
"Pfeil nach unten",
"Süden",
"abwärts",
"nach unten",
],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("κάτω βέλος"),
keywords: &["απόλυτη", "βέλος", "κάτω", "κατεύθυνση", "νότια"],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("down arrow"),
keywords: &["arrow", "cardinal", "direction", "down", "south"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha hacia abajo"),
keywords: &["abajo", "dirección", "flecha", "flecha hacia abajo", "sur"],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &[
"cardinal",
"dirección",
"flecha",
"flecha hacia abajo",
"flecha hacia el sur",
"punto",
],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &[
"abajo",
"cardinal",
"dirección",
"flecha",
"flecha hacia abajo",
"sur",
],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("nool alla"),
keywords: &["lõuna", "nool", "nool alla", "suund"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("behera gezia"),
keywords: &["behera", "gezia", "hegoa", "kardinala", "norabidea"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان پایین"),
keywords: &["جنوب", "جهت اصلی", "مسیر", "پایین", "پیکان"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("nuoli alas"),
keywords: &[
"etelä",
"ilmansuunta",
"nuoli",
"nuoli alas",
"pääilmansuunta",
],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("pababang arrow"),
keywords: &[
"arrow",
"cardinal",
"direksyon",
"pababa",
"pababang arrow",
"timog",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("pílur sum peikar niðureftir"),
keywords: &[
"k´ós",
"niður",
"pílur",
"pílur sum peikar niðureftir",
"suður",
"ætt",
],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche bas"),
keywords: &["direction", "flèche", "flèche bas", "sud"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("flèche pointant vers le bas"),
keywords: &[
"direction",
"en bas",
"flèche",
"flèche pointant vers le bas",
"point cardinal",
"sud",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead síos"),
keywords: &["príomhaird", "saighead", "síos", "treo", "ó dheas"],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead gu deas"),
keywords: &[
"combaist",
"comhair",
"deas",
"saighead",
"saighead gu deas",
"àirde",
],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha cara abaixo"),
keywords: &["abaixo", "dirección", "frecha cara abaixo", "sur"],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("નીચ\u{ac7} તીર"),
keywords: &["તીર", "દક\u{acd}ષિણ", "દિશા", "નીચ\u{ac7}"],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("kibiyar ƙasa"),
keywords: &[
"kibiya",
"kibiyar ƙasa",
"kudu",
"shiyya",
"tsinin shiyya",
"ƙasa",
],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ למטה"),
keywords: &["דרום", "חץ", "כיוון", "למטה"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("नीच\u{947} तीर"),
keywords: &[
"कार\u{94d}डिनल",
"दक\u{94d}षिण दिशा",
"नीच\u{947}",
"नीच\u{947} तीर",
],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("strelica prema dolje"),
keywords: &["jug", "smjer", "strelica", "strelica prema dolje"],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("lefelé mutató nyíl"),
keywords: &["dél", "irány", "lefelé", "lefelé mutató nyíl", "nyíl"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("ներքև սլաք"),
keywords: &["գլխավոր", "հարավ", "ներքև", "ուղղություն", "սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah bawah"),
keywords: &[
"arah",
"bawah",
"kardinal",
"panah",
"selatan",
"tanda panah bawah",
],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("ube ala"),
keywords: &["ala", "kadịnal", "nduzi", "ndịda", "ube"],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("ör niður"),
keywords: &["höfuðátt", "niður", "suður", "átt", "ör"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia rivolta verso il basso"),
keywords: &[
"direzione",
"freccia",
"freccia in basso",
"freccia rivolta verso il basso",
"punto cardinale",
"sud",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("下矢印"),
keywords: &["下", "下矢印", "南", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah mudhun"),
keywords: &["arah", "kardinal", "kidul", "panah", "panah mudhun"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი ქვემოთ"),
keywords: &["ისარი", "კარდინალური", "მიმართულება", "სამხრეთი", "ქვემოთ"],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("aneccab akesser"),
keywords: &["aneccab akesser"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("төмен қараған көрсеткі"),
keywords: &[
"бағыт",
"көрсеткі",
"негізгі",
"оңтүстік",
"төмен",
"төмен бағытты көрсеткі",
"төмен қараған көрсеткі",
],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("nedadvendt pil"),
keywords: &[
"ned",
"nedadvendt pil",
"pil",
"retning",
"syd",
"verdenshjørne",
],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some("ព\u{17d2}រ\u{17bd}ញទៅក\u{17d2}រោម"),
keywords: &[
"ច\u{17bb}ះក\u{17d2}រោម",
"ត\u{17d2}ប\u{17bc}ង",
"ទ\u{17b7}ស",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញទៅក\u{17d2}រោម",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಕ\u{cc6}ಳಮುಖ ಬಾಣ"),
keywords: &[
"ಕ\u{cc6}ಳಗ\u{cbf}ನ ಬಾಣ",
"ಕ\u{cc6}ಳಗ\u{cc6}",
"ಕ\u{cc6}ಳಮುಖ ಬಾಣ",
"ದ\u{cbf}ಕ\u{ccd}ಕು",
"ನ\u{cbf}ರ\u{ccd}ದೇಶನ",
"ಬಾಣದ ಗುರುತು",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("하향 화살표"),
keywords: &["남쪽", "하향 화살표", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("सकयल बाण"),
keywords: &[
"दक\u{94d}षिण",
"दिशा",
"बाण",
"म\u{941}ख\u{947}लदिशा",
"सकयल",
],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("төмөн караган жебе"),
keywords: &["багыт", "жебе", "түштүк", "төмөн", "төмөн караган жебе"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("Feil no ënnen"),
keywords: &[
"Feil",
"Feil no ënnen",
"Himmelsrichtung",
"Richtung",
"Süden",
"no ënnen",
],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນລ\u{ebb}ງ"),
keywords: &[
"ທ\u{eb4}ດທາງ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນລ\u{ebb}ງ",
"ລ\u{ebb}ງ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė į apačią"),
keywords: &[
"koordinatė",
"kryptis",
"pietūs",
"rodyklė",
"rodyklė į apačią",
"į apačią",
],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("lejupbultiņa"),
keywords: &[
"bultiņa",
"dienvidi",
"lejupbultiņa",
"lejupvērsta bultiņa",
"norāde",
],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere iho"),
keywords: &["ahunga", "iho", "matua", "pere", "tonga"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("стрелка надолу"),
keywords: &["долу", "стрелка", "стрелка надолу", "југ"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some(
"ത\u{d3e}ഴേക\u{d4d}ക\u{d4d} ച\u{d42}ണ\u{d4d}ട\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ക\u{d3e}ർഡിനൽ",
"ത\u{d3e}ഴേക\u{d4d}ക\u{d4d}",
"ത\u{d3e}ഴേക\u{d4d}ക\u{d4d} ച\u{d42}ണ\u{d4d}ട\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
"തെക\u{d4d}ക\u{d4d}",
"ദിശ",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("доошоо сум"),
keywords: &["доошоо", "кардинал", "сум", "чиглэл", "өмнөд"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("खाली दर\u{94d}शविणारा बाण"),
keywords: &[
"खाली दर\u{94d}शविणारा बाण",
"बाण । महत\u{94d}त\u{94d}वाच\u{947} । दिशा । खाली । दक\u{94d}षिण",
],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah ke bawah"),
keywords: &[
"anak panah",
"anak panah ke bawah",
"arah",
"kardinal",
"ke bawah",
"selatan",
],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa ’l isfel"),
keywords: &[
"direzzjoni",
"kardinal",
"nofsinhar",
"vleġġa",
"vleġġa ’l isfel",
"’l isfel",
],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("အောက\u{103a}ည\u{103d}\u{103e}န\u{103a}မြား"),
keywords: &[
"တောင\u{103a}ဘက\u{103a}",
"မြား",
"လမ\u{103a}းည\u{103d}\u{103e}န\u{103a}",
"အရပ\u{103a}မျက\u{103a}န\u{103e}ာပြ အမ\u{103e}တ\u{103a}အသား",
"အောက\u{103a}ည\u{103d}\u{103e}န\u{103a}မြား",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("pil ned"),
keywords: &["nedoverpil", "pil", "pil ned", "retning", "sør"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("तलतर\u{94d}फको तीर"),
keywords: &[
"कार\u{94d}डिनल",
"तल",
"तलतर\u{94d}फको तीर",
"तीर",
"दक\u{94d}षिण",
"दिशा",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("pijl omlaag"),
keywords: &["omlaag", "pijl", "richting", "windrichting", "zuid"],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("pil ned"),
keywords: &["nedoverpil", "pil", "pil ned", "retning", "sør"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ନ\u{b3f}ମ\u{b4d}ନମ\u{b41}ଖୀ ତୀର"),
keywords: &[
"ତୀର",
"ଦକ\u{b4d}ଷ\u{b3f}ଣ",
"ଦ\u{b3f}ଗ",
"ନ\u{b3f}ମ\u{b4d}ନ",
"ନ\u{b3f}ମ\u{b4d}ନମ\u{b41}ଖୀ ତୀର",
"ପ\u{b4d}ରମ\u{b41}ଖ",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਥ\u{a71}ਲ\u{a47}\u{a47} ਤੀਰ"),
keywords: &[
"ਕਾਰਡੀਨਲ",
"ਤੀਰ",
"ਥ\u{a71}ਲ\u{a47}",
"ਥ\u{a71}ਲ\u{a47}\u{a47} ਤੀਰ",
"ਦਿਸ\u{a3c}ਾ",
"ਦ\u{a71}ਖਣ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("ہیٹھلا تیر"),
keywords: &["اہم", "تیر", "جنوب", "سمت", "ہیٹھاں", "ہیٹھلا تیر"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Áro De Pọínt Daun"),
keywords: &[
"Dairẹ\u{301}kshọn",
"Daun",
"Kádínal",
"Saut",
"Áro",
"Áro De Pọínt Daun",
],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka w dół"),
keywords: &[
"kierunek",
"południe",
"strzałka",
"strzałka na dół",
"strzałka w dół",
],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("ښکته غشی"),
keywords: &["اساسي", "اړخ", "سويل", "غشی", "ښکته"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta para baixo"),
keywords: &[
"cardinal",
"direção",
"para baixo",
"seta",
"seta para baixo",
"sul",
],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("↑↑↑"),
keywords: &[
"cardeal",
"direção",
"para baixo",
"seta",
"seta para baixo",
"sul",
],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("uran wach’i"),
keywords: &["uran wach’i"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată orientată în jos"),
keywords: &[
"cardinal",
"direcție",
"jos",
"sud",
"săgeată",
"săgeată orientată în jos",
],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-158"),
keywords: &["E10-158"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка вниз"),
keywords: &["вниз", "направление", "стрелка", "юг"],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("akambi kamanura"),
keywords: &[
"akambi kamanura",
"amajyepfo",
"hasi",
"icyerekezo",
"kimwe mu byerekezo bine",
"umwambi",
],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("هيٺ تير"),
keywords: &["تير", "طرف", "هيٺ", "ڏکڻ", "ڪارڊينل"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("යට\u{dd2} ඊතලය"),
keywords: &[
"ඊතලය",
"ක\u{dcf}ඩ\u{dd2}නල\u{dca}",
"දක\u{dd4}ණ",
"ද\u{dd2}ශ\u{dcf}ව",
"පහළට",
"යට\u{dd2} ඊතලය",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka nadol"),
keywords: &["dolu", "juh", "nadol", "šípka"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica dol"),
keywords: &["glavno", "jug", "navzdol", "puščica", "puščica dol", "smer"],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("fallaarta hoose"),
keywords: &[
"afarta jiho",
"fallaar",
"fallaarta hoose",
"hoos",
"jiho",
"koonfur",
],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjetë poshtë"),
keywords: &["drejtim", "jug", "kryesore", "poshtë", "shigjetë"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица надоле"),
keywords: &["jуг", "смер", "стрeлицa", "стрелица надоле"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("стрелица надоље"),
keywords: &["стрелица надоље"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica nadole"),
keywords: &["jug", "smer", "strelica", "strelica nadole"],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: Some("strelica nadolje"),
keywords: &["strelica nadolje"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("nedåtpil"),
keywords: &[
"nedåt",
"nedåtpil",
"pil",
"riktning",
"söder",
"väderstreck",
],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale unaoangalia chini"),
keywords: &[
"chini",
"kusini",
"mshale",
"mshale unaoangalia chini",
"mshale unaoelekea chini",
"sehemu kuu ya dira",
"uelekeo",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("க\u{bc0}ழ\u{bcd} நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
keywords: &[
"அம\u{bcd}புக\u{bcd}குறி",
"க\u{bbe}ர\u{bcd}டினல\u{bcd}",
"க\u{bc0}ழ\u{bcd} நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
"க\u{bc0}ழ\u{bcd}நோக\u{bcd}கிய",
"திசை",
"தெற\u{bcd}கு",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("ద\u{c3f}గువ బ\u{c3e}ణం"),
keywords: &[
"క\u{c3e}ర\u{c4d}డ\u{c3f}నల\u{c4d}",
"దక\u{c4d}ష\u{c3f}ణం",
"ద\u{c3f}గువ",
"ద\u{c3f}శ",
"బ\u{c3e}ణం",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири поён"),
keywords: &["асосӣ", "поён", "самт", "тир", "тири поён", "ҷануб"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ลง"),
keywords: &[
"ท\u{e34}ศทาง",
"ลง",
"ล\u{e39}กศร",
"ล\u{e39}กศรช\u{e35}\u{e49}ลง",
"ใต\u{e49}",
],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ምልክት ናብ ታሕቲ"),
keywords: &["ምልክት", "ምልክት ናብ ታሕቲ", "ታሕቲ", "ኣንፈት", "ካአዲናል", "ደቡብ"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("aşak ok"),
keywords: &["aşak", "göni", "günorta", "ok", "ugur"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau ki lalo"),
keywords: &["hifo", "lalo", "ngahau", "ngahau ki lalo"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("aşağı ok"),
keywords: &["ana yön", "aşağı", "aşağı yönlü ok", "güney", "ok", "yön"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("ئاستى كۆرسەتكۈچ"),
keywords: &["ئاساس", "ئاستى", "جەنۇب", "كۆرسەتكۈچ", "يۆنىلىش"],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка вниз"),
keywords: &["напрям", "південний", "сторона", "стрілка", "стрілка вниз"],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("نیچے کا تیر"),
keywords: &["تیر", "جنوب", "سمت", "نیچے", "نیچے کا تیر", "کارڈینل"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("pastga strelka"),
keywords: &["janub", "pastga", "strelka", "yo‘nalish"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên xuống"),
keywords: &["chính", "hướng", "mũi tên", "mũi tên xuống", "nam", "xuống"],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fettu suuf"),
keywords: &[
"direction",
"fett",
"fettu suuf",
"gànjool",
"kàrdinal",
"suuf",
],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo olusezantsi"),
keywords: &[
"emzantsi",
"ezantsi",
"ukhardinale",
"ulwalathiso",
"utolo",
"utolo olusezantsi",
],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("àmì ilẹ\u{300} ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà"),
keywords: &[
"gúúsù",
"ilè\u{329}",
"kádínàlì",
"àmì ilẹ\u{300} ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
"àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
"ìtọ\u{301}sọ\u{301}nà",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("向下箭咀"),
keywords: &["南", "向下", "向下箭咀", "基點", "方向", "箭咀"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("向下箭咀"),
keywords: &["南", "向下", "向下箭咀", "基点", "方向", "箭咀"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("向下箭头"),
keywords: &["向下", "向下箭头", "基本", "方位", "正南", "箭头"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("向下箭頭"),
keywords: &["向下箭頭", "方向"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("下箭嘴"),
keywords: &["下箭嘴", "方向"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("umcibisholo opheke phansi"),
keywords: &[
"inkomba",
"ningizimu",
"phansi",
"umcibisholo",
"umcibisholo opheke phansi",
],
},
],
};
#[doc = "↙\u{fe0f}"]
pub const DOWN_LEFT_ARROW: crate::Emoji = crate::Emoji {
glyph: "↙\u{fe0f}",
codepoint: "2199 FE0F",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "down-left arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[crate::Emoji {
glyph: "↙",
codepoint: "2199",
status: crate::Status::Unqualified,
introduction_version: 0.6f32,
name: "down-left arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: true,
variants: &[],
annotations: &[],
}],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("pyl na links onder"),
keywords: &["pyl", "pyl na links onder", "rigting", "suidwes"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("ወደ ታች ግራ ጠቋሚ ቀስት"),
keywords: &["ቀስት", "አቅጣጫ", "ኢንተርካርዲናል", "ወደ ታች ግራ ጠቋሚ ቀስት", "ደቡብ ምዕራብ"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهم لأسفل اليسار"),
keywords: &["اتجاه", "جنوب غرب", "سهم", "سهم لأسفل اليسار", "كاردينال"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some("অধোগ\u{9be}মী ব\u{9be}ও\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}"),
keywords: &[
"অধোগ\u{9be}মী ব\u{9be}ও\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
"আন\u{9cd}তঃদিশ",
"ক\u{9be}\u{981}ড\u{9bc}",
"দক\u{9cd}ষিণ-পশ\u{9cd}চিম",
"দিশ",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("üzüaşağı sola yönəlmiş ox"),
keywords: &[
"cənub qərb",
"interkardinal",
"istiqamət",
"ox",
"üzüaşağı sola yönəlmiş ox",
],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка ўніз-улева"),
keywords: &[
"кірунак",
"паўднёвы захад",
"стрэлка",
"стрэлка ўніз-улева",
"уніз-улева",
],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("стрелка надолу и наляво"),
keywords: &["посока", "стрелка", "стрелка надолу и наляво", "югозапад"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("নীচের ব\u{9be}মে তীর"),
keywords: &[
"আন\u{9cd}তঃ দিগনির\u{9cd}ণয\u{9bc}",
"তীর",
"দক\u{9cd}ষিণ-পশ\u{9cd}চিম",
"দিক",
"নীচের ব\u{9be}মে তীর",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica dolje-lijevo"),
keywords: &["jugozapad", "smjer", "strelica", "strelica dolje-lijevo"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa cap avall a l’esquerra"),
keywords: &[
"avall a l’esquerra",
"direcció",
"fletxa",
"fletxa cap avall a l’esquerra",
"sud-oest",
],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎡᎳᏗ-ᎠᎦᏍᎦᏂ ᎦᏝᏗ"),
keywords: &[
"ᎡᎳᏗ-ᎠᎦᏍᎦᏂ ᎦᏝᏗ",
"ᎤᎦᏅᏮᎤᏕᎵᎬ",
"ᎦᏝᏗ",
"ᏂᏚᏳᎪᏛᎢ",
"ᏅᎩᏂᏚᏳᎪᏛᎢ ᏂᏚᏓᎸᏗᏎᎯᎯ",
],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipka doleva dolů"),
keywords: &[
"jihozápad",
"směr",
"strany",
"světové",
"vedlejší",
"šipka",
"šipka doleva dolů",
],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth i lawr-chwith"),
keywords: &["De-orllewin", "cyfeiriad", "saeth", "saeth i lawr-chwith"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("nedadvendt pil mod venstre"),
keywords: &["nedadvendt pil mod venstre", "pil", "retning", "sydvest"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("Pfeil nach links unten"),
keywords: &["Pfeil", "Pfeil nach links unten", "nach links unten"],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("κάτω αριστερό βέλος"),
keywords: &[
"βέλος",
"ενδιάμεση",
"κάτω αριστερό βέλος",
"κατεύθυνση",
"νοτιοδυτικά",
],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("down-left arrow"),
keywords: &[
"arrow",
"direction",
"down-left arrow",
"intercardinal",
"southwest",
],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha hacia la esquina inferior izquierda"),
keywords: &[
"abajo",
"dirección",
"flecha",
"flecha hacia la esquina inferior izquierda",
"izquierda",
"suroeste",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("flecha hacia el suroeste"),
keywords: &["flecha hacia el suroeste"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("nool alla vasakule"),
keywords: &["edel", "nool", "nool alla vasakule", "suund"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("behera eta ezkerrera gezia"),
keywords: &[
"behera eta ezkerrera gezia",
"gezi",
"hego-mendebalde",
"kardinal arteko",
"norabide",
],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان پایین چپ"),
keywords: &["جنوب غربی", "جهت فرعی", "مسیر", "پیکان", "پیکان پایین چپ"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("nuoli alavasemmalle"),
keywords: &[
"ilmansuunta",
"lounas",
"nuoli",
"nuoli alavasemmalle",
"väli-ilmansuunta",
],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("pababang pakaliwang arrow"),
keywords: &[
"arrow",
"direksyon",
"intercardinal",
"pababa",
"pababang pakaliwang arrow",
"pakaliwa",
"timog-kanluran",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("pílur sum peikar niðureftir og til vinstru"),
keywords: &[
"k´ós",
"pílur",
"pílur sum peikar niðureftir og til vinstru",
"ætt",
"útsynningur",
],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche bas gauche"),
keywords: &["direction", "flèche", "flèche bas gauche", "sud-ouest"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("flèche pointant vers le bas à gauche"),
keywords: &[
"direction",
"en bas à gauche",
"flèche",
"flèche pointant vers le bas à gauche",
"point intercardinal",
"sud-ouest",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead síos ar chlé"),
keywords: &[
"idirbunuimhir",
"saighead",
"saighead síos ar chlé",
"thiar theas",
"treo",
],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead gun iar-dheas"),
keywords: &[
"combaist",
"comhair",
"iar-dheas",
"saighead",
"saighead gun iar-dheas",
"àirde",
],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha cara abaixo á esquerda"),
keywords: &[
"abaixo",
"dirección",
"frecha",
"frecha cara abaixo á esquerda",
"suroeste",
],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("નીચ\u{ac7}-ડાબ\u{ac1}\u{a82} તીર"),
keywords: &[
"તીર",
"દક\u{acd}ષિણપશ\u{acd}ચિમ",
"નીચ\u{ac7}-ડાબ\u{ac1}\u{a82} તીર",
],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("kibiyar ƙasa ta hagu"),
keywords: &[
"a ƙetaren tsini shiyya",
"kibiya",
"kibiyar ƙasa ta hagu",
"kudu maso yamma",
"shiyya",
],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ למטה ושמאלה"),
keywords: &[
"דרום-מערב",
"חץ",
"חץ למטה ושמאלה",
"כיוון",
"למטה",
"שמאלה",
],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("नीच\u{947}-बाया\u{901} तीर"),
keywords: &[
"इ\u{902}टरकार\u{94d}डिनल",
"दक\u{94d}षिण-पश\u{94d}चिम दिशा",
"नीच\u{947}-बाया\u{901} तीर",
],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("strelica prema dolje-lijevo"),
keywords: &[
"jugozapad",
"smjer",
"strelica",
"strelica prema dolje-lijevo",
],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("balra lefelé mutató nyíl"),
keywords: &["balra lefelé mutató nyíl", "délnyugat", "irány", "nyíl"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("ներքև ձախ սլաք"),
keywords: &["հարավ-արևմուտք", "ներքև ձախ սլաք", "ուղղություն", "սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah kiri bawah"),
keywords: &[
"arah",
"barat daya",
"interkardinal",
"panah",
"tanda panah kiri bawah",
],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("ube-ala akaekpe"),
keywords: &[
"intakadịnal",
"nduzi",
"ndịdaọdịda anyanwụ",
"ube",
"ube-ala akaekpe",
],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("ör ská niður til vinstri"),
keywords: &["suðvestur", "átt", "ör", "ör ská niður til vinstri"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia in basso a sinistra"),
keywords: &[
"direzione",
"freccia",
"freccia in basso a sinistra",
"punto intercardinale",
"sud-ovest",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("左下矢印"),
keywords: &["南西", "左下", "左下矢印", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah mudhun-ngiwa"),
keywords: &[
"arah",
"interkardinal",
"kidul-kulon",
"panah",
"panah mudhun-ngiwa",
],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი ქვემოთ და მარცხნივ"),
keywords: &[
"ინტერკარდინალური",
"ისარი",
"ისარი ქვემოთ და მარცხნივ",
"კარდინალური",
"მიმართულება",
"სამხრეთ-დასავლეთი",
],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("aneccab akesser ayeffus"),
keywords: &["aneccab akesser ayeffus"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("төменгі сол жақ көрсеткісі"),
keywords: &[
"бағыт",
"көрсеткі",
"оңтүстік-батыс",
"румба аралық",
"төменгі сол жақ көрсеткісі",
],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("nedadvendt pil mod venstre"),
keywords: &["nedadvendt pil mod venstre", "pil", "retning", "sydvest"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some(
"ព\u{17d2}រ\u{17bd}ញទៅក\u{17d2}រោមងាកទៅឆ\u{17d2}វេងក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
),
keywords: &[
"ទ\u{17b7}ស",
"ទ\u{17b7}សដៅ",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញទៅក\u{17d2}រោមងាកទៅឆ\u{17d2}វេងក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಕ\u{cc6}ಳಗ\u{cbf}ನ ಎಡ ಬಾಣ"),
keywords: &[
"ಆಗ\u{ccd}ನೇಯ",
"ಕ\u{cc6}ಳಗ\u{cbf}ನ ಎಡ ಬಾಣ",
"ದ\u{cbf}ಕ\u{ccd}ಕು",
"ಬಾಣದ ಗುರುತು",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("좌하향 화살표"),
keywords: &["남서쪽", "좌하향 화살표", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("सकयल-दावो बाण"),
keywords: &[
"उपदिशा",
"दिशा",
"न\u{948}ऋत\u{94d}य",
"बाण",
"सकयल-दावो बाण",
],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("төмөн сол жакты караган жебе"),
keywords: &[
"багыт",
"жебе",
"түштүк-батыш",
"төмөн сол жакты караган жебе",
],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("Feil no ënnen a lénks"),
keywords: &[
"Feil",
"Feil no ënnen a lénks",
"Himmelsrichtung",
"Richtung",
"Südwesten",
],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນລ\u{ebb}ງມ\u{eb8}ມຊ\u{ec9}າຍ"),
keywords: &[
"ທ\u{eb4}ດທາງ",
"ມ\u{eb8}ມຊ\u{ec9}າຍ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນລ\u{ebb}ງມ\u{eb8}ມຊ\u{ec9}າຍ",
"ລ\u{ebb}ງ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė į apačią ir į kairę"),
keywords: &[
"kryptis",
"pietvakariai",
"rodyklė",
"rodyklė į apačią ir į kairę",
],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("uz apakšējo kreiso stūri vērsta bultiņa"),
keywords: &[
"bultiņa",
"dienvidrietumi",
"norāde",
"uz apakšējo kreiso stūri vērsta bultiņa",
],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere iho-mauī"),
keywords: &[
"ahunga",
"pere",
"pere iho-mauī",
"uru-mā-tonga",
"waenga-matua",
],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("стрелка надолу лево"),
keywords: &["долу", "стрелка", "стрелка надолу лево", "југозапад"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some(
"ത\u{d3e}ഴോട\u{d4d}ട\u{d4d} ഇടത\u{d4d}തേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ഇന\u{d4d}റർക\u{d3e}ർഡിനൽ",
"ത\u{d3e}ഴോട\u{d4d}ട\u{d4d} ഇടത\u{d4d}തേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
"തെക\u{d4d}ക\u{d4d} പടിഞ\u{d4d}ഞ\u{d3e}റൻ",
"ദിശ",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("зүүн доошоо сум"),
keywords: &["баруун урд", "зүүн доошоо сум", "сум", "чиглэл"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("खाली-डावीकड\u{947} दर\u{94d}शविणारा बाण"),
keywords: &[
"अ\u{902}तर\u{94d}गत महत\u{94d}वाच\u{947}",
"खाली-डावीकड\u{947} दर\u{94d}शविणारा बाण",
"दक\u{94d}षिणपश\u{94d}चिम",
"दिशा",
"बाण",
],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah ke penjuru kiri bawah"),
keywords: &[
"anak panah",
"anak panah ke penjuru kiri bawah",
"antara kardinal",
"arah",
"barat daya",
],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa xellugija ’l isfel"),
keywords: &[
"direzzjoni",
"interkardinal",
"lbiċ",
"vleġġa",
"vleġġa xellugija ’l isfel",
],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ဘယ\u{103a}ဘက\u{103a}အောက\u{103a}ည\u{103d}\u{103e}န\u{103a} မြား"),
keywords: &[
"ဘယ\u{103a}ဘက\u{103a}အောက\u{103a}ည\u{103d}\u{103e}န\u{103a} မြား",
"မြား",
"လမ\u{103a}းည\u{103d}\u{103e}န\u{103a}",
"အနောက\u{103a}တောင\u{103a}",
"အရပ\u{103a}မျက\u{103a}န\u{103e}ာ န\u{103e}စ\u{103a}ခ\u{102f}ကြားပြ အမ\u{103e}တ\u{103a}အသား",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("pil ned-venstre"),
keywords: &["pil", "pil ned-venstre", "retning", "sørvest"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("तल बाया\u{901}तर\u{94d}फको तीर"),
keywords: &[
"इन\u{94d}टरकार\u{94d}डिनल",
"तल बाया\u{901}तर\u{94d}फको तीर",
"तीर",
"दक\u{94d}षिण प\u{942}र\u{94d}व",
"दिशा",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("pijl linksomlaag"),
keywords: &["linksbeneden", "linksomlaag", "pijl", "zuidwest"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ତଳ-ବ\u{b3e}ମ ତୀର"),
keywords: &[
"ଅନ\u{b4d}ତଃପ\u{b4d}ରଧ\u{b3e}ନ",
"ତଳ-ବ\u{b3e}ମ ତୀର",
"ତୀର",
"ଦକ\u{b4d}ଷ\u{b3f}ଣପଶ\u{b4d}ଚ\u{b3f}ମ",
"ଦ\u{b3f}ଗ",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਥ\u{a71}ਲ\u{a47}-ਖ\u{a71}ਬ\u{a47} ਤੀਰ"),
keywords: &[
"ਇ\u{a70}ਟਰਕਾਰਡੀਨਲ",
"ਤੀਰ",
"ਥ\u{a71}ਲ\u{a47}-ਖ\u{a71}ਬ\u{a47} ਤੀਰ",
"ਦਿਸ\u{a3c}ਾ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("ہیٹھلا-کھبا تیر"),
keywords: &["انٹر کارڈینل", "تیر", "جنوب مغرب", "سمت", "ہیٹھلا-کھبا تیر"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Áro De Pọínt Daún-Lẹft"),
keywords: &[
"Dairẹ\u{301}kshọn",
"Kádínal",
"Saútwẹst",
"Áro",
"Áro De Pọínt Daún-Lẹft",
],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka w dół w lewo"),
keywords: &[
"kierunek",
"południowy zachód",
"strzałka",
"strzałka na dół w lewo",
"strzałka w dół w lewo",
],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("ښکته چپ اړخ ته غشی"),
keywords: &[
"انټر کارډينل",
"اړخ",
"سويلي ختيځ",
"غشی",
"چپ ښکته غشی",
"ښکته چپ اړخ ته غشی",
],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta para baixo e para a esquerda"),
keywords: &[
"direção",
"intercardinal",
"seta",
"seta para baixo e para a esquerda",
"sudoeste",
],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("seta para baixo e para a esquerda"),
keywords: &[
"direção",
"intermédio",
"seta",
"seta para baixo e para a esquerda",
"sudoeste",
],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("uran lluq’i wach’i"),
keywords: &["uran lluq’i wach’i"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată orientată în stânga jos"),
keywords: &[
"direcție",
"intercardinal",
"sud-vest",
"săgeată",
"săgeată orientată în stânga jos",
],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-011"),
keywords: &["E10-011"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка влево-вниз"),
keywords: &["влево-вниз", "направление", "стрелка", "юго-запад"],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("umwambi wo hasi ahagana ibumoso"),
keywords: &[
"amajyepfo ashyira iburengerazuba",
"icyerekezo",
"kiri hagati y’ibyerekezo bine",
"umwambi",
"umwambi wo hasi ahagana ibumoso",
],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("هيٺ کاٻي طرف تير"),
keywords: &["انٽرڪارڊينل", "تير", "طرف", "هيٺ کاٻي طرف تير", "ڏکڻ اولھ"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("යට\u{dd2}-වම\u{dca} ඊතලය"),
keywords: &[
"ඉන\u{dca}ටර\u{dca}ක\u{dcf}ඩ\u{dd2}නල\u{dca}",
"ඊතලය",
"ද\u{dd2}ශ\u{dcf}ව",
"න\u{dd2}ර\u{dd2}ත",
"යට\u{dd2}-වම\u{dca} ඊතලය",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka doľava nadol"),
keywords: &["doľava nadol", "juhozápad", "šípka", "šípka doľava nadol"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica levo dol"),
keywords: &[
"jugozahod",
"puščica",
"puščica levo dol",
"smer",
"stransko",
],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("fallaarta hoose ee bidix"),
keywords: &[
"fallaar",
"fallaarta hoose ee bidix",
"isdhaafsiga jihooyinka",
"jihada",
"koonfur galbeed",
],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjetë poshtë majtas"),
keywords: &[
"drejtim",
"i ndërmjetëm",
"jugperëndim",
"shigjetë",
"shigjetë poshtë majtas",
],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица надоле и налево"),
keywords: &["jугoзaпaд", "смер", "стрeлицa", "стрелица надоле и налево"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("стрелица надоље и налијево"),
keywords: &["стрелица надоље и налијево"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica nadole i nalevo"),
keywords: &["jugozapad", "smer", "strelica", "strelica nadole i nalevo"],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: Some("strelica nadolje i nalijevo"),
keywords: &["strelica nadolje i nalijevo"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("nedåtpil vänster"),
keywords: &["nedåt", "nedåtpil vänster", "pil", "vänster"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale unaoelekeza chini kushoto"),
keywords: &[
"kati ya sehemu kuu ya dira",
"kusini magharibi",
"mshale",
"mshale unaoelekeza chini kushoto",
"uelekeo",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("க\u{bc0}ழ\u{bcd}-இடது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
keywords: &[
"க\u{bc0}ழ\u{bcd}-இடது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
"திசை",
"தென\u{bcd}மேற\u{bcd}கு",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("ద\u{c3f}గువ ఎడమ బ\u{c3e}ణం"),
keywords: &[
"ద\u{c3f}గువ ఎడమ బ\u{c3e}ణం",
"ద\u{c3f}శ",
"న\u{c48}రుత\u{c3f}",
"బ\u{c3e}ణం",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири поёну чап"),
keywords: &["мобайнӣ", "самт", "тир", "тири поёну чап", "ҷанубу ғарб"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ม\u{e38}\u{e38}มซ\u{e49}ายล\u{e48}าง"),
keywords: &[
"ตะว\u{e31}นตกเฉ\u{e35}ยงใต\u{e49}",
"ท\u{e34}ศทาง",
"ล\u{e39}กศร",
"ล\u{e39}กศรช\u{e35}\u{e49}ม\u{e38}\u{e38}มซ\u{e49}ายล\u{e48}าง",
],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ምልክት ናብ-ጸጋማይ ታሕቲ"),
keywords: &[
"ምልክት",
"ምልክት ናብ-ጸጋማይ ታሕቲ",
"ኣንፈት",
"ውሽጣዊ ናይ ካርዲናል",
"ደቡባዊ ምዕራብ",
],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("aşak-çepe ok"),
keywords: &["aşak-çepe ok", "gytak", "günorta-günbatar", "ok", "ugur"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau ki lalo toʻohema"),
keywords: &["hema", "lalo", "ngahau", "ngahau ki lalo toʻohema"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("sol aşağı ok"),
keywords: &["ara yön", "güney batı", "ok", "sol aşağı ok", "yön"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("ئاستى سول كۆرسەتكۈچ"),
keywords: &[
"ئارا يۆنىلىشلىك",
"ئاستى سول كۆرسەتكۈچ",
"غەربىي-شىمال",
"كۆرسەتكۈچ",
"يۆنىلىش",
],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка вниз уліво"),
keywords: &[
"напрям",
"проміжний",
"південно-західний",
"стрілка",
"стрілка вниз уліво",
"стрілка вниз і ліворуч",
],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("نیچے بائیں تیر"),
keywords: &["تیر", "جنوب مغرب", "سائن", "سمت", "نیچے بائیں تیر"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("past-chapga strelka"),
keywords: &[
"chiziq",
"janubi g‘arb",
"past-chapga strelka",
"past-chapga yo‘nalish",
"yo‘nalish",
],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên xuống bên trái"),
keywords: &[
"hướng",
"mũi tên",
"mũi tên xuống bên trái",
"nhiều hướng",
"tây nam",
],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("suuf-càmmoñ fett"),
keywords: &[
"fett",
"interkàrdinal",
"jubluwaay",
"sowwu-gànjool",
"suuf-càmmoñ fett",
],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo olusezantsi ekhohlo"),
keywords: &[
"entshona mzantsi",
"ukhardinale ohlangeneyo",
"ulwalathiso",
"utolo",
"utolo olusezantsi ekhohlo",
],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("àmì ilẹ\u{300} ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá òsì"),
keywords: &[
"iwọ\u{300} oòrùn gúúsù",
"àmì ilẹ\u{300} ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá òsì",
"àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
"ìlọ\u{301}pọ\u{300} kádínàlì",
"ìtọ\u{301}sọ\u{301}nà",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("向左下箭咀"),
keywords: &["向左下箭咀", "基點間", "方向", "箭咀", "西南"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("向左下箭咀"),
keywords: &["向左下箭咀", "基点间", "方向", "箭咀", "西南"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("左下箭头"),
keywords: &["左下箭头", "方向", "标识", "西南"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("左下箭頭"),
keywords: &["左下箭頭", "方向"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("下左箭嘴"),
keywords: &["下左箭嘴", "方向"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("umcibisholo obheke phansi kwesokunxele"),
keywords: &[
"inkomba",
"ningizimuntshonalanga",
"umcibisholo",
"umcibisholo obheke phansi kwesokunxele",
],
},
],
};
// Generated metadata for the "left arrow" emoji (U+2B05 U+FE0F).
// `annotations` holds one entry per locale — a text-to-speech name (`tts`)
// plus search keywords — and each entry is compiled in only when the
// matching locale feature flag is enabled.
#[doc = "⬅\u{fe0f}"]
pub const LEFT_ARROW: crate::Emoji = crate::Emoji {
    glyph: "⬅\u{fe0f}",
    codepoint: "2B05 FE0F",
    status: crate::Status::FullyQualified,
    introduction_version: 0.6f32,
    name: "left arrow",
    group: "Symbols",
    subgroup: "arrow",
    is_variant: false,
    // The single variant is the bare form without the VS-16 variation
    // selector (U+FE0F); it carries no annotations of its own.
    variants: &[crate::Emoji {
        glyph: "⬅",
        codepoint: "2B05",
        status: crate::Status::Unqualified,
        introduction_version: 0.6f32,
        name: "left arrow",
        group: "Symbols",
        subgroup: "arrow",
        is_variant: true,
        variants: &[],
        annotations: &[],
    }],
    // Per-locale names and keywords, sorted by language tag.
    // NOTE(review): entries whose tts/keywords are "↑↑↑" appear to be the
    // CLDR inheritance marker used by sub-locales (en_AU, es_419, …) that
    // fall back to their parent locale's data — confirm against the
    // upstream CLDR source before relying on them.
    annotations: &[
        #[cfg(feature = "af")]
        crate::Annotation {
            lang: "af",
            tts: Some("pyl na links"),
            keywords: &["links", "pyl", "pyl na links", "rigting", "wes"],
        },
        #[cfg(feature = "am")]
        crate::Annotation {
            lang: "am",
            tts: Some("ወደ ግራ ጠቋሚ ቀስት"),
            keywords: &["ምዕራብ", "ቀስት", "አቅጣጫ", "ካርዲናል", "ወደ ግራ ጠቋሚ ቀስት"],
        },
        #[cfg(feature = "ar")]
        crate::Annotation {
            lang: "ar",
            tts: Some("سهم لليسار"),
            keywords: &["اتجاه", "سهم", "سهم لليسار", "غرب", "كاردينال"],
        },
        #[cfg(feature = "as")]
        crate::Annotation {
            lang: "as",
            tts: Some("ব\u{9be}ও\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}"),
            keywords: &[
                "ক\u{9be}\u{981}ড\u{9bc}",
                "দিশ",
                "পশ\u{9cd}চিম",
                "ব\u{9be}ও\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
                "ম\u{9c1}খ\u{9cd}য দিশবিল\u{9be}ক",
            ],
        },
        #[cfg(feature = "az")]
        crate::Annotation {
            lang: "az",
            tts: Some("sola ox"),
            keywords: &["istiqamət", "kardinal", "ox", "qərb", "sola ox"],
        },
        #[cfg(feature = "be")]
        crate::Annotation {
            lang: "be",
            tts: Some("стрэлка ўлева"),
            keywords: &["захад", "кірунак", "стрэлка", "стрэлка ўлева", "улева"],
        },
        #[cfg(feature = "bg")]
        crate::Annotation {
            lang: "bg",
            tts: Some("Стрелка наляво"),
            keywords: &["Стрелка наляво", "запад", "посока", "стрелка"],
        },
        #[cfg(feature = "bn")]
        crate::Annotation {
            lang: "bn",
            tts: Some("ব\u{9be}ম তীর"),
            keywords: &[
                "তীর",
                "দিকনির\u{9cd}দেশ",
                "পরিম\u{9be}ণব\u{9be}চক",
                "পশ\u{9cd}চিম",
                "ব\u{9be}ম তীর",
            ],
        },
        #[cfg(feature = "bs")]
        crate::Annotation {
            lang: "bs",
            tts: Some("strelica nalijevo"),
            keywords: &["smjer", "strelica", "strelica nalijevo", "zapad"],
        },
        #[cfg(feature = "ca")]
        crate::Annotation {
            lang: "ca",
            tts: Some("fletxa cap a l’esquerra"),
            keywords: &[
                "direcció",
                "esquerra",
                "fletxa",
                "fletxa cap a l’esquerra",
                "oest",
            ],
        },
        #[cfg(feature = "chr")]
        crate::Annotation {
            lang: "chr",
            tts: Some("ᎠᎦᏍᎦᏂ ᎦᏝᏗ"),
            keywords: &["ᎠᎦᏍᎦᏂ ᎦᏝᏗ", "ᎤᏕᎵᎬᎢ", "ᎦᏝᏗ", "ᏂᏚᏳᎪᏛᎢ", "ᏧᏂᏍᎨᏓ"],
        },
        #[cfg(feature = "cs")]
        crate::Annotation {
            lang: "cs",
            tts: Some("šipka doleva"),
            keywords: &[
                "hlavní",
                "směr",
                "strany",
                "světové",
                "západ",
                "šipka",
                "šipka doleva",
            ],
        },
        #[cfg(feature = "cy")]
        crate::Annotation {
            lang: "cy",
            tts: Some("saeth i’r chwith"),
            keywords: &["Gorllewin", "cyfeiriad", "saeth", "saeth i’r chwith"],
        },
        #[cfg(feature = "da")]
        crate::Annotation {
            lang: "da",
            tts: Some("pil mod venstre"),
            keywords: &["pil", "pil mod venstre", "retning", "verdenshjørne", "vest"],
        },
        #[cfg(feature = "de")]
        crate::Annotation {
            lang: "de",
            tts: Some("Pfeil nach links"),
            keywords: &[
                "Linkspfeil",
                "Pfeil",
                "Pfeil nach links",
                "Westen",
                "nach links",
            ],
        },
        #[cfg(feature = "el")]
        crate::Annotation {
            lang: "el",
            tts: Some("αριστερό βέλος"),
            keywords: &["απόλυτη", "αριστερό βέλος", "βέλος", "δυτικά", "κατεύθυνση"],
        },
        #[cfg(feature = "en")]
        crate::Annotation {
            lang: "en",
            tts: Some("left arrow"),
            keywords: &["arrow", "cardinal", "direction", "left arrow", "west"],
        },
        #[cfg(feature = "en_AU")]
        crate::Annotation {
            lang: "en_AU",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_CA")]
        crate::Annotation {
            lang: "en_CA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_GB")]
        crate::Annotation {
            lang: "en_GB",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_IN")]
        crate::Annotation {
            lang: "en_IN",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es")]
        crate::Annotation {
            lang: "es",
            tts: Some("flecha hacia la izquierda"),
            keywords: &["flecha", "flecha hacia la izquierda", "izquierda", "oeste"],
        },
        #[cfg(feature = "es_419")]
        crate::Annotation {
            lang: "es_419",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_MX")]
        crate::Annotation {
            lang: "es_MX",
            tts: Some("↑↑↑"),
            keywords: &[
                "dirección",
                "flecha",
                "flecha hacia el oeste",
                "flecha hacia la izquierda",
                "izquierda",
                "oeste",
            ],
        },
        #[cfg(feature = "es_US")]
        crate::Annotation {
            lang: "es_US",
            tts: Some("↑↑↑"),
            keywords: &[
                "cardinal",
                "dirección",
                "flecha",
                "flecha hacia la izquierda",
                "oeste",
            ],
        },
        #[cfg(feature = "et")]
        crate::Annotation {
            lang: "et",
            tts: Some("nool vasakule"),
            keywords: &["lääs", "nool", "nool vasakule", "suund"],
        },
        #[cfg(feature = "eu")]
        crate::Annotation {
            lang: "eu",
            tts: Some("ezkerrera gezia"),
            keywords: &[
                "ezkerrera gezia",
                "gezi",
                "kardinal",
                "mendebalde",
                "norabide",
            ],
        },
        #[cfg(feature = "fa")]
        crate::Annotation {
            lang: "fa",
            tts: Some("پیکان چپ"),
            keywords: &["جهت اصلی", "غرب", "مسیر", "پیکان", "پیکان چپ"],
        },
        #[cfg(feature = "fi")]
        crate::Annotation {
            lang: "fi",
            tts: Some("nuoli vasemmalle"),
            keywords: &[
                "ilmansuunta",
                "länsi",
                "nuoli",
                "nuoli vasemmalle",
                "pääilmansuunta",
            ],
        },
        #[cfg(feature = "fil")]
        crate::Annotation {
            lang: "fil",
            tts: Some("pakaliwang arrow"),
            keywords: &[
                "arrow",
                "cardinal",
                "direksyon",
                "kanluran",
                "pakaliwa",
                "pakaliwang arrow",
            ],
        },
        #[cfg(feature = "fo")]
        crate::Annotation {
            lang: "fo",
            tts: Some("pílur sum peikar til vinstru"),
            keywords: &[
                "k´ós",
                "pílur",
                "pílur sum peikar til vinstru",
                "vestur",
                "ætt",
            ],
        },
        #[cfg(feature = "fr")]
        crate::Annotation {
            lang: "fr",
            tts: Some("flèche gauche"),
            keywords: &["direction", "flèche", "flèche gauche", "ouest"],
        },
        #[cfg(feature = "fr_CA")]
        crate::Annotation {
            lang: "fr_CA",
            tts: Some("flèche pointant vers la gauche"),
            keywords: &[
                "direction",
                "flèche",
                "flèche pointant vers la gauche",
                "ouest",
                "point cardinal",
                "à gauche",
            ],
        },
        #[cfg(feature = "ga")]
        crate::Annotation {
            lang: "ga",
            tts: Some("saighead ar chlé"),
            keywords: &["príomhaird", "saighead", "saighead ar chlé", "siar", "treo"],
        },
        #[cfg(feature = "gd")]
        crate::Annotation {
            lang: "gd",
            tts: Some("saighead gun iar"),
            keywords: &[
                "combaist",
                "comhair",
                "iar",
                "saighead",
                "saighead gun iar",
                "àirde",
            ],
        },
        #[cfg(feature = "gl")]
        crate::Annotation {
            lang: "gl",
            tts: Some("frecha cara á esquerda"),
            keywords: &["dirección", "esquerda", "frecha cara á esquerda", "oeste"],
        },
        #[cfg(feature = "gu")]
        crate::Annotation {
            lang: "gu",
            tts: Some("ડાબ\u{ac1}\u{a82} તીર"),
            keywords: &[
                "ડાબ\u{ac1}\u{a82} તીર",
                "તીર",
                "દિશા",
                "પશ\u{acd}ચિમ",
                "મ\u{ac1}ખ\u{acd}ય",
            ],
        },
        #[cfg(feature = "ha")]
        crate::Annotation {
            lang: "ha",
            tts: Some("kibiyar hagu"),
            keywords: &["kibiya", "kibiyar hagu", "shiyya", "tsinin shiyya", "yamma"],
        },
        #[cfg(feature = "he")]
        crate::Annotation {
            lang: "he",
            tts: Some("חץ שמאלה"),
            keywords: &["חץ", "כיוון", "מערב", "שמאלה"],
        },
        #[cfg(feature = "hi")]
        crate::Annotation {
            lang: "hi",
            tts: Some("बाया\u{901} तीर"),
            keywords: &[
                "कार\u{94d}डिनल",
                "तीर",
                "पश\u{94d}चिम दिशा",
                "बाया\u{901} तीर",
            ],
        },
        #[cfg(feature = "hr")]
        crate::Annotation {
            lang: "hr",
            tts: Some("strelica prema lijevo"),
            keywords: &["smjer", "strelica", "strelica prema lijevo", "zapad"],
        },
        #[cfg(feature = "hu")]
        crate::Annotation {
            lang: "hu",
            tts: Some("balra mutató nyíl"),
            keywords: &["balra", "balra mutató nyíl", "irány", "nyugat", "nyíl"],
        },
        #[cfg(feature = "hy")]
        crate::Annotation {
            lang: "hy",
            tts: Some("ձախ սլաք"),
            keywords: &["արևմուտք", "գլխավոր", "ձախ սլաք", "ուղղություն", "սլաք"],
        },
        #[cfg(feature = "id")]
        crate::Annotation {
            lang: "id",
            tts: Some("tanda panah kiri"),
            keywords: &["arah", "barat", "kardinal", "panah", "tanda panah kiri"],
        },
        #[cfg(feature = "ig")]
        crate::Annotation {
            lang: "ig",
            tts: Some("ube akaekpe"),
            keywords: &["kadịnal", "nduzi", "ube", "ube akaekpe", "ọdịda anyanwụ"],
        },
        #[cfg(feature = "is")]
        crate::Annotation {
            lang: "is",
            tts: Some("ör til vinstri"),
            keywords: &["höfuðátt", "vestur", "átt", "ör", "ör til vinstri"],
        },
        #[cfg(feature = "it")]
        crate::Annotation {
            lang: "it",
            tts: Some("freccia rivolta a sinistra"),
            keywords: &[
                "direzione",
                "freccia",
                "freccia a sinistra",
                "freccia rivolta a sinistra",
                "ovest",
                "punto cardinale",
            ],
        },
        #[cfg(feature = "ja")]
        crate::Annotation {
            lang: "ja",
            tts: Some("左矢印"),
            keywords: &["左", "左矢印", "矢印", "西"],
        },
        #[cfg(feature = "jv")]
        crate::Annotation {
            lang: "jv",
            tts: Some("panah ngiwa"),
            keywords: &["arah", "kardinal", "kulon", "panah", "panah ngiwa"],
        },
        #[cfg(feature = "ka")]
        crate::Annotation {
            lang: "ka",
            tts: Some("ისარი მარცხნივ"),
            keywords: &[
                "დასავლეთი",
                "ისარი",
                "ისარი მარცხნივ",
                "კარდინალური",
                "მიმართულება",
            ],
        },
        #[cfg(feature = "kab")]
        crate::Annotation {
            lang: "kab",
            tts: Some("aneccab azelmaḍ"),
            keywords: &["aneccab azelmaḍ"],
        },
        #[cfg(feature = "kk")]
        crate::Annotation {
            lang: "kk",
            tts: Some("сол жақ бағытты көрсеткі"),
            keywords: &[
                "батыс",
                "бағыт",
                "көрсеткі",
                "негізгі",
                "сол жақ бағытты көрсеткі",
            ],
        },
        #[cfg(feature = "kl")]
        crate::Annotation {
            lang: "kl",
            tts: Some("pil mod venstre"),
            keywords: &["pil", "pil mod venstre", "retning", "verdenshjørne", "vest"],
        },
        #[cfg(feature = "km")]
        crate::Annotation {
            lang: "km",
            tts: Some("ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេង"),
            keywords: &[
                "ទ\u{17b7}ស",
                "ព\u{17d2}រ\u{17bd}ញ",
                "ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេង",
                "ល\u{17b7}ច",
            ],
        },
        #[cfg(feature = "kn")]
        crate::Annotation {
            lang: "kn",
            tts: Some("ಎಡಭಾಗದ ಬಾಣ"),
            keywords: &[
                "ಎಡ ಬಾಣ",
                "ಎಡಭಾಗದ ಬಾಣ",
                "ದ\u{cbf}ಕ\u{ccd}ಕು ಪಶ\u{ccd}ಚ\u{cbf}ಮ",
                "ನ\u{cbf}ರ\u{ccd}ದೇಶನ",
                "ಬಾಣದ ಗುರುತು",
            ],
        },
        #[cfg(feature = "ko")]
        crate::Annotation {
            lang: "ko",
            tts: Some("좌향 화살표"),
            keywords: &["서쪽", "좌향 화살표", "화살표"],
        },
        #[cfg(feature = "kok")]
        crate::Annotation {
            lang: "kok",
            tts: Some("दावो बाण"),
            keywords: &[
                "अस\u{94d}त\u{902}त",
                "दावो बाण",
                "दिशा",
                "बाण",
                "म\u{941}ख\u{947}लदिशा",
            ],
        },
        #[cfg(feature = "ky")]
        crate::Annotation {
            lang: "ky",
            tts: Some("сол жакты караган жебе"),
            keywords: &["багыт", "батыш", "жебе", "сол жакты караган жебе"],
        },
        #[cfg(feature = "lb")]
        crate::Annotation {
            lang: "lb",
            tts: Some("Feil no lénks"),
            keywords: &[
                "Feil",
                "Feil no lénks",
                "Himmelsrichtung",
                "Richtung",
                "Westen",
            ],
        },
        #[cfg(feature = "lo")]
        crate::Annotation {
            lang: "lo",
            tts: Some("ລ\u{eb9}ກສອນຊ\u{ec9}າຍ"),
            keywords: &[
                "ຊ\u{ec9}າຍ",
                "ທ\u{eb4}ດທາງ",
                "ລ\u{eb9}ກສອນ",
                "ລ\u{eb9}ກສອນຊ\u{ec9}າຍ",
            ],
        },
        #[cfg(feature = "lt")]
        crate::Annotation {
            lang: "lt",
            tts: Some("rodyklė į kairę"),
            keywords: &[
                "koordinatė",
                "kryptis",
                "rodyklė",
                "rodyklė į kairę",
                "vakarai",
            ],
        },
        #[cfg(feature = "lv")]
        crate::Annotation {
            lang: "lv",
            tts: Some("kreisā bultiņa"),
            keywords: &[
                "bultiņa",
                "kreisā bultiņa",
                "norāde",
                "pa kreisi vērsta bultiņa",
                "rietumi",
            ],
        },
        #[cfg(feature = "mi")]
        crate::Annotation {
            lang: "mi",
            tts: Some("pere mauī"),
            keywords: &["ahunga", "matua", "pere", "pere mauī", "uru"],
        },
        #[cfg(feature = "mk")]
        crate::Annotation {
            lang: "mk",
            tts: Some("стрелка налево"),
            keywords: &["запад", "лево", "стрелка", "стрелка налево"],
        },
        #[cfg(feature = "ml")]
        crate::Annotation {
            lang: "ml",
            tts: Some(
                "ഇടത\u{d4d}തേക\u{d4d}ക\u{d4d} ച\u{d42}ണ\u{d4d}ട\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
            ),
            keywords: &[
                "അമ\u{d4d}പടയ\u{d3e}ളം",
                "ഇടത\u{d4d}തേക\u{d4d}ക\u{d4d} ച\u{d42}ണ\u{d4d}ട\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
                "ക\u{d3e}ർഡിനൽ",
                "ദിശ",
                "പടിഞ\u{d4d}ഞ\u{d3e}റ\u{d4d}",
            ],
        },
        #[cfg(feature = "mn")]
        crate::Annotation {
            lang: "mn",
            tts: Some("зүүн сум"),
            keywords: &["баруун", "зүүн сум", "кардинал", "сум", "чиглэл"],
        },
        #[cfg(feature = "mr")]
        crate::Annotation {
            lang: "mr",
            tts: Some("डावा बाण"),
            keywords: &[
                "डावा बाण",
                "दिशा",
                "पश\u{94d}चिम",
                "बाण",
                "महत\u{94d}वाच\u{947}",
            ],
        },
        #[cfg(feature = "ms")]
        crate::Annotation {
            lang: "ms",
            tts: Some("anak panah ke kiri"),
            keywords: &[
                "anak panah",
                "anak panah ke kiri",
                "arah",
                "barat",
                "kardinal",
            ],
        },
        #[cfg(feature = "mt")]
        crate::Annotation {
            lang: "mt",
            tts: Some("vleġġa xellugija"),
            keywords: &[
                "direzzjoni",
                "il-punent",
                "kardinal",
                "vleġġa",
                "vleġġa xellugija",
            ],
        },
        #[cfg(feature = "my")]
        crate::Annotation {
            lang: "my",
            tts: Some("ဘယ\u{103a}ည\u{103d}\u{103e}န\u{103a}မြား"),
            keywords: &[
                "ဘယ\u{103a}ည\u{103d}\u{103e}န\u{103a}မြား",
                "မြား",
                "လမ\u{103a}းည\u{103d}\u{103e}န\u{103a}",
                "အနောက\u{103a}",
                "အရပ\u{103a}မျက\u{103a}န\u{103e}ာပြ သင\u{103a}\u{1039}ကေတ",
            ],
        },
        #[cfg(feature = "nb")]
        crate::Annotation {
            lang: "nb",
            tts: Some("pil venstre"),
            keywords: &["pil", "pil venstre", "retning", "venstrepil", "vest"],
        },
        #[cfg(feature = "ne")]
        crate::Annotation {
            lang: "ne",
            tts: Some("बाया\u{901}तर\u{94d}फको तीर"),
            keywords: &[
                "कार\u{94d}डिनल",
                "तीर",
                "दिशा",
                "पश\u{94d}चिम",
                "बाया\u{901}तर\u{94d}फको तीर",
            ],
        },
        #[cfg(feature = "nl")]
        crate::Annotation {
            lang: "nl",
            tts: Some("pijl naar links"),
            keywords: &[
                "pijl",
                "pijl naar links",
                "richting",
                "west",
                "windrichting",
            ],
        },
        #[cfg(feature = "nn")]
        crate::Annotation {
            lang: "nn",
            tts: Some("pil venstre"),
            keywords: &["pil", "pil venstre", "retning", "venstrepil", "vest"],
        },
        #[cfg(feature = "or")]
        crate::Annotation {
            lang: "or",
            tts: Some("ବ\u{b3e}ମ ତୀର"),
            keywords: &[
                "ତୀର",
                "ଦ\u{b3f}ଗ",
                "ପଶ\u{b4d}ଚ\u{b3f}ମ",
                "ପ\u{b4d}ରଧ\u{b3e}ନ",
                "ବ\u{b3e}ମ ତୀର",
            ],
        },
        #[cfg(feature = "pa")]
        crate::Annotation {
            lang: "pa",
            tts: Some("ਖ\u{a71}ਬ\u{a47} ਤੀਰ"),
            keywords: &[
                "ਕਾਰਡੀਨਲ",
                "ਖ\u{a71}ਬ\u{a47} ਤੀਰ",
                "ਤੀਰ",
                "ਦਿਸ\u{a3c}ਾ",
                "ਪ\u{a71}ਛਮ",
            ],
        },
        #[cfg(feature = "pa_Arab")]
        crate::Annotation {
            lang: "pa_Arab",
            tts: Some("کھبا تیر"),
            keywords: &["اہم", "تیر", "سمت", "مغرب", "کھبا تیر"],
        },
        #[cfg(feature = "pcm")]
        crate::Annotation {
            lang: "pcm",
            tts: Some("Áro De Pọínt Lẹft"),
            keywords: &[
                "Dairẹ\u{301}kshọn",
                "Kádínal",
                "Wẹst",
                "Áro",
                "Áro De Pọínt Lẹft",
            ],
        },
        #[cfg(feature = "pl")]
        crate::Annotation {
            lang: "pl",
            tts: Some("strzałka w lewo"),
            keywords: &["kierunek", "strzałka", "strzałka w lewo", "zachód"],
        },
        #[cfg(feature = "ps")]
        crate::Annotation {
            lang: "ps",
            tts: Some("چپ غشی"),
            keywords: &["اساسي", "اړخ", "غشی", "لوېديځ", "چپ غشی"],
        },
        #[cfg(feature = "pt")]
        crate::Annotation {
            lang: "pt",
            tts: Some("seta para a esquerda"),
            keywords: &[
                "direção",
                "esquerda",
                "oeste",
                "seta",
                "seta para a esquerda",
            ],
        },
        #[cfg(feature = "pt_PT")]
        crate::Annotation {
            lang: "pt_PT",
            tts: Some("↑↑↑"),
            keywords: &[
                "cardeal",
                "direção",
                "oeste",
                "seta",
                "seta para a esquerda",
            ],
        },
        #[cfg(feature = "qu")]
        crate::Annotation {
            lang: "qu",
            tts: Some("lluq’i wach’i"),
            keywords: &["lluq’i wach’i"],
        },
        #[cfg(feature = "ro")]
        crate::Annotation {
            lang: "ro",
            tts: Some("săgeată orientată în stânga"),
            keywords: &[
                "cardinal",
                "direcție",
                "săgeată",
                "săgeată orientată în stânga",
                "vest",
            ],
        },
        #[cfg(feature = "root")]
        crate::Annotation {
            lang: "root",
            tts: Some("E10-156"),
            keywords: &["E10-156"],
        },
        #[cfg(feature = "ru")]
        crate::Annotation {
            lang: "ru",
            tts: Some("стрелка влево"),
            keywords: &["влево", "запад", "направление", "стрелка"],
        },
        #[cfg(feature = "rw")]
        crate::Annotation {
            lang: "rw",
            tts: Some("akambi k’ibumoso"),
            keywords: &[
                "akambi k’ibumoso",
                "iburengerazuba",
                "icyerekezo",
                "kimwe mu byerekezo bine",
                "umwambi",
            ],
        },
        #[cfg(feature = "sd")]
        crate::Annotation {
            lang: "sd",
            tts: Some("کاٻي طرف تير"),
            keywords: &["اولھ", "تير", "طرف", "کاٻي طرف تير", "ڪارڊينل"],
        },
        #[cfg(feature = "si")]
        crate::Annotation {
            lang: "si",
            tts: Some("වම\u{dca} ඊතලය"),
            keywords: &[
                "ඊතලය",
                "ක\u{dcf}ඩ\u{dd2}නල\u{dca}",
                "ද\u{dd2}ශ\u{dcf}ව",
                "බටහ\u{dd2}ර",
                "වම\u{dca} ඊතලය",
            ],
        },
        #[cfg(feature = "sk")]
        crate::Annotation {
            lang: "sk",
            tts: Some("šípka doľava"),
            keywords: &["doľava", "západ", "šípka"],
        },
        #[cfg(feature = "sl")]
        crate::Annotation {
            lang: "sl",
            tts: Some("puščica levo"),
            keywords: &["glavno", "puščica", "puščica levo", "smer", "zahod"],
        },
        #[cfg(feature = "so")]
        crate::Annotation {
            lang: "so",
            tts: Some("fallarta bidix"),
            keywords: &[
                "afarta jiho",
                "fallaar",
                "fallaarta bidix",
                "fallarta bidix",
                "galbeed",
                "jiho",
            ],
        },
        #[cfg(feature = "sq")]
        crate::Annotation {
            lang: "sq",
            tts: Some("shigjeta majtas"),
            keywords: &[
                "drejtim",
                "kryesor",
                "perëndim",
                "shigjeta majtas",
                "shigjetë",
            ],
        },
        #[cfg(feature = "sr")]
        crate::Annotation {
            lang: "sr",
            tts: Some("стрелица налево"),
            keywords: &["зaпaд", "смер", "стрeлицa", "стрелица налево"],
        },
        #[cfg(feature = "sr_Cyrl_BA")]
        crate::Annotation {
            lang: "sr_Cyrl_BA",
            tts: Some("стрелица налијево"),
            keywords: &["стрелица налијево"],
        },
        #[cfg(feature = "sr_Latn")]
        crate::Annotation {
            lang: "sr_Latn",
            tts: Some("strelica nalevo"),
            keywords: &["smer", "strelica", "strelica nalevo", "zapad"],
        },
        #[cfg(feature = "sr_Latn_BA")]
        crate::Annotation {
            lang: "sr_Latn_BA",
            tts: Some("strelica nalijevo"),
            keywords: &["strelica nalijevo"],
        },
        #[cfg(feature = "sv")]
        crate::Annotation {
            lang: "sv",
            tts: Some("vänsterpil"),
            keywords: &["pil", "riktning", "väderstreck", "vänsterpil", "väster"],
        },
        #[cfg(feature = "sw")]
        crate::Annotation {
            lang: "sw",
            tts: Some("mshale unaoelekeza kushoto"),
            keywords: &[
                "magharibi",
                "mshale",
                "mshale unaoelekeza kushoto",
                "sehemu kuu ya dira",
                "uelekeo",
            ],
        },
        #[cfg(feature = "sw_KE")]
        crate::Annotation {
            lang: "sw_KE",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "ta")]
        crate::Annotation {
            lang: "ta",
            tts: Some("இடது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
            keywords: &[
                "அம\u{bcd}புக\u{bcd}குறி",
                "இடது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
                "க\u{bbe}ர\u{bcd}டினல\u{bcd}",
                "திசை",
            ],
        },
        #[cfg(feature = "te")]
        crate::Annotation {
            lang: "te",
            tts: Some("ఎడమ బ\u{c3e}ణం"),
            keywords: &[
                "ఎడమ బ\u{c3e}ణం",
                "క\u{c3e}ర\u{c4d}డ\u{c3f}నల\u{c4d}",
                "ద\u{c3f}శ",
                "బ\u{c3e}ణం",
            ],
        },
        #[cfg(feature = "tg")]
        crate::Annotation {
            lang: "tg",
            tts: Some("тири чап"),
            keywords: &["асосӣ", "самт", "тир", "тири чап", "ғарб"],
        },
        #[cfg(feature = "th")]
        crate::Annotation {
            lang: "th",
            tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ไปทางซ\u{e49}าย"),
            keywords: &[
                "ตะว\u{e31}นตก",
                "ท\u{e34}ศทาง",
                "ล\u{e39}กศร",
                "ล\u{e39}กศรช\u{e35}\u{e49}ไปทางซ\u{e49}าย",
            ],
        },
        #[cfg(feature = "ti")]
        crate::Annotation {
            lang: "ti",
            tts: Some("ምልክት ናብ-ጸጋም"),
            keywords: &["ምልክት", "ምልክት ናብ-ጸጋም", "ምዕራብ", "ኣንፈት", "ካርዲና፡"],
        },
        #[cfg(feature = "tk")]
        crate::Annotation {
            lang: "tk",
            tts: Some("çepe ok"),
            keywords: &["göni", "günbatar", "ok", "ugur", "çepe ok"],
        },
        #[cfg(feature = "to")]
        crate::Annotation {
            lang: "to",
            tts: Some("ngahau ki toʻohema"),
            keywords: &["hema", "ngahau", "ngahau ki toʻohema"],
        },
        #[cfg(feature = "tr")]
        crate::Annotation {
            lang: "tr",
            tts: Some("sol ok"),
            keywords: &["ana yön", "batı", "ok", "sol ok", "yön"],
        },
        #[cfg(feature = "ug")]
        crate::Annotation {
            lang: "ug",
            tts: Some("سول كۆرسەتكۈچ"),
            keywords: &["ئاساس", "سول كۆرسەتكۈچ", "غەرب", "كۆرسەتكۈچ", "يۆنىلىش"],
        },
        #[cfg(feature = "uk")]
        crate::Annotation {
            lang: "uk",
            tts: Some("стрілка вліво"),
            keywords: &[
                "західний",
                "напрям",
                "сторона",
                "стрілка вліво",
                "стрілка ліворуч",
            ],
        },
        #[cfg(feature = "ur")]
        crate::Annotation {
            lang: "ur",
            tts: Some("بائیں تیر"),
            keywords: &["بائیں تیر", "تیر", "سمت", "مغرب", "کارڈینل"],
        },
        #[cfg(feature = "uz")]
        crate::Annotation {
            lang: "uz",
            tts: Some("chapga strelka"),
            keywords: &["chapga strelka", "g‘arb", "strelka", "yo‘nalish"],
        },
        #[cfg(feature = "vi")]
        crate::Annotation {
            lang: "vi",
            tts: Some("mũi tên trái"),
            keywords: &["chiều hướng", "chính", "mũi tên", "mũi tên trái", "tây"],
        },
        #[cfg(feature = "wo")]
        crate::Annotation {
            lang: "wo",
            tts: Some("fettu càmmoñ"),
            keywords: &["fett", "fettu càmmoñ", "jubluwaay", "kàrdinal", "sowwu"],
        },
        #[cfg(feature = "xh")]
        crate::Annotation {
            lang: "xh",
            tts: Some("utolo olusekhohlo"),
            keywords: &[
                "entshona",
                "ukhardinale",
                "ulwalathiso",
                "utolo",
                "utolo olusekhohlo",
            ],
        },
        #[cfg(feature = "yo")]
        crate::Annotation {
            lang: "yo",
            tts: Some("àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá òsì"),
            keywords: &[
                "iwọ\u{300} oòrùn",
                "kádínàlì",
                "àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
                "àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá òsì",
                "ìtọ\u{301}sọ\u{301}nà",
            ],
        },
        #[cfg(feature = "yue")]
        crate::Annotation {
            lang: "yue",
            tts: Some("向左箭咀"),
            keywords: &["向左箭咀", "基點", "方向", "箭咀", "西"],
        },
        #[cfg(feature = "yue_Hans")]
        crate::Annotation {
            lang: "yue_Hans",
            tts: Some("向左箭咀"),
            keywords: &["向左箭咀", "基点", "方向", "箭咀", "西"],
        },
        #[cfg(feature = "zh")]
        crate::Annotation {
            lang: "zh",
            tts: Some("向左箭头"),
            keywords: &["向左箭头", "方向", "标识", "西"],
        },
        #[cfg(feature = "zh_Hant")]
        crate::Annotation {
            lang: "zh_Hant",
            tts: Some("向左箭頭"),
            keywords: &["向左箭頭", "方向"],
        },
        #[cfg(feature = "zh_Hant_HK")]
        crate::Annotation {
            lang: "zh_Hant_HK",
            tts: Some("左箭嘴"),
            keywords: &["左箭嘴", "方向"],
        },
        #[cfg(feature = "zu")]
        crate::Annotation {
            lang: "zu",
            tts: Some("inkombasinxele"),
            keywords: &["inkomba", "inkombasinxele", "ntshonalanga"],
        },
    ],
};
#[doc = "↖\u{fe0f}"]
pub const UP_LEFT_ARROW: crate::Emoji = crate::Emoji {
glyph: "↖\u{fe0f}",
codepoint: "2196 FE0F",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "up-left arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[crate::Emoji {
glyph: "↖",
codepoint: "2196",
status: crate::Status::Unqualified,
introduction_version: 0.6f32,
name: "up-left arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: true,
variants: &[],
annotations: &[],
}],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("pyl na links bo"),
keywords: &["noordwes", "pyl", "pyl na links bo", "rigting"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("ወደ ላይ ግራ ጠቋሚ ቀስት"),
keywords: &["ሰሜን ምዕራብ", "ቀስት", "አቅጣጫ", "ኢንተርካርዲናል", "ወደ ላይ ግራ ጠቋሚ ቀስት"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهم لأعلى اليسار"),
keywords: &["اتجاه", "سهم", "سهم لأعلى اليسار", "شمال غرب", "كاردينال"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some(
"ঊৰ\u{9cd}ধ\u{9cd}বগ\u{9be}মী ব\u{9be}ও\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
),
keywords: &[
"আন\u{9cd}তঃদিশ",
"উত\u{9cd}তৰ-পশ\u{9cd}চিম",
"ঊৰ\u{9cd}ধ\u{9cd}বগ\u{9be}মী ব\u{9be}ও\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
"ক\u{9be}\u{981}ড\u{9bc}",
"দিশ",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("üzüyuxarı sola yönəlmiş ox"),
keywords: &[
"interkardinal",
"istiqamət",
"ox",
"üzüyuxarı sola yönəlmiş ox",
"şimal qərb",
],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка ўверх-улева"),
keywords: &[
"кірунак",
"паўночны захад",
"стрэлка",
"стрэлка ўверх-улева",
"уверх-улева",
],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("стрелка нагоре и наляво"),
keywords: &[
"посока",
"северозапад",
"стрелка",
"стрелка нагоре и наляво",
],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("উপরে ব\u{9be}মে তীর"),
keywords: &[
"আন\u{9cd}তঃ দিগনির\u{9cd}ণয\u{9bc}",
"উত\u{9cd}তর-পশ\u{9cd}চিম",
"উপরে ব\u{9be}মে তীর",
"তীর",
"দিক",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica gore-lijevo"),
keywords: &["sjeverozapad", "smjer", "strelica", "strelica gore-lijevo"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa cap amunt a l’esquerra"),
keywords: &[
"amunt a l’esquerra",
"direcció",
"fletxa",
"fletxa cap amunt a l’esquerra",
"nord-oest",
],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎦᎸᎳᏗ-ᎠᎦᏍᎦᏂ ᎦᏝᏗ"),
keywords: &[
"ᎤᏴᏢᎢᏭᏕᎵᎬᎢ",
"ᎦᎸᎳᏗ-ᎠᎦᏍᎦᏂ ᎦᏝᏗ",
"ᎦᏝᏗ",
"ᏂᏚᏳᎪᏛᎢ",
"ᏅᎩᏂᏚᏳᎪᏛᎢ ᏂᏚᏓᎸᏗᏎᎯᎯ",
],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipka doleva nahoru"),
keywords: &[
"severozápad",
"směr",
"strany",
"světové",
"vedlejší",
"šipka",
"šipka doleva nahoru",
],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth i fyny-chwith"),
keywords: &[
"Gogledd-orllewin",
"cyfeiriad",
"saeth",
"saeth i fyny-chwith",
],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("opadvendt pil mod venstre"),
keywords: &["nordvest", "opadvendt pil mod venstre", "pil", "retning"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("Pfeil nach links oben"),
keywords: &[
"Nordwesten",
"Pfeil",
"Pfeil nach links oben",
"nach links oben",
],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("πάνω αριστερό βέλος"),
keywords: &[
"βέλος",
"βορειοδυτικά",
"ενδιάμεση",
"κατεύθυνση",
"πάνω αριστερό βέλος",
],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("up-left arrow"),
keywords: &[
"arrow",
"direction",
"intercardinal",
"northwest",
"up-left arrow",
],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha hacia la esquina superior izquierda"),
keywords: &[
"arriba",
"dirección",
"flecha",
"flecha hacia la esquina superior izquierda",
"izquierda",
"noroeste",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &[
"arriba",
"dirección",
"flecha",
"flecha hacia el noroeste",
"flecha hacia la esquina superior izquierda",
"noroeste",
],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &[
"arriba",
"dirección",
"flecha",
"flecha hacia el noroeste",
"flecha hacia la esquina superior izquierda",
"noroeste",
],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("nool üles vasakule"),
keywords: &["loe", "nool", "nool üles vasakule", "suund"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("gora eta ezkerrera gezia"),
keywords: &[
"gezi",
"gora eta ezkerrera gezia",
"ipar-mendebalde",
"kardinal arteko",
"norabide",
],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان بالا چپ"),
keywords: &["جهت فرعی", "شمال غربی", "مسیر", "پیکان", "پیکان بالا چپ"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("nuoli ylävasemmalle"),
keywords: &[
"ilmansuunta",
"luode",
"nuoli",
"nuoli ylävasemmalle",
"väli-ilmansuunta",
],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("pataas na pakaliwang arrow"),
keywords: &[
"arrow",
"direksyon",
"hilagang-kanluran",
"intercardinal",
"pakaliwang",
"pataas",
"pataas na pakaliwang arrow",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("pílur sum peikar uppeftir og til vinstru"),
keywords: &[
"k´ós",
"pílur",
"pílur sum peikar uppeftir og til vinstru",
"ætt",
"útnyrðingur",
],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche haut gauche"),
keywords: &["direction", "flèche", "flèche haut gauche", "nord-ouest"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("flèche pointant vers le haut à gauche"),
keywords: &[
"direction",
"en haut à gauche",
"flèche",
"flèche pointant vers le haut à gauche",
"nord-ouest",
"point intercardinal",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead suas ar chlé"),
keywords: &[
"idirmheánach",
"saighead",
"saighead suas ar chlé",
"siar ó thuaidh",
"treo",
],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead gun iar-thuath"),
keywords: &[
"combaist",
"comhair",
"iar-thuath",
"saighead",
"saighead gun iar-thuath",
"àirde",
],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha cara arriba á esquerda"),
keywords: &[
"arriba",
"dirección",
"frecha",
"frecha cara arriba á esquerda",
"noroeste",
],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("ઉપર-ડાબ\u{ac1}\u{a82} તીર"),
keywords: &["ઉપર-ડાબ\u{ac1}\u{a82} તીર", "પ\u{ac2}ર\u{acd}વપશ\u{acd}ચિમ"],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("kibiyar sama ta hagu"),
keywords: &[
"a ƙetaren tsini shiyya",
"arewa maso yamma",
"kibiya",
"kibiyar sama ta hagu",
"shiyya",
],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ למעלה ושמאלה"),
keywords: &[
"חץ",
"חץ למעלה ושמאלה",
"כיוון",
"למעלה",
"צפון-מערב",
"שמאלה",
],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("ऊपर-बाया\u{901} तीर"),
keywords: &[
"इ\u{902}टरकार\u{94d}डिनल",
"उत\u{94d}तर-पश\u{94d}चिम दिशा",
"ऊपर-बाया\u{901} तीर",
],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("strelica prema gore-lijevo"),
keywords: &[
"sjeverozapad",
"smjer",
"strelica",
"strelica prema gore-lijevo",
],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("balra felfelé mutató nyíl"),
keywords: &["balra felfelé mutató nyíl", "irány", "nyíl", "északnyugat"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("վերև ձախ սլաք"),
keywords: &["հյուսիս-արևմուտք", "ուղղություն", "սլաք", "վերև ձախ սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah kiri atas"),
keywords: &[
"arah",
"barat laut",
"interkardinal",
"panah",
"tanda panah kiri atas",
],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("ube akaekpe-dị elu"),
keywords: &[
"intakadịnal",
"nduzi",
"ube",
"ube akaekpe-dị elu",
"ugwuọdịda anyanwụ",
],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("ör ská upp til vinstri"),
keywords: &["norðvestur", "átt", "ör", "ör ská upp til vinstri"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia in alto a sinistra"),
keywords: &[
"direzione",
"freccia",
"freccia in alto a sinistra",
"nord-ovest",
"punto intercardinale",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("左上矢印"),
keywords: &["北西", "左上", "左上矢印", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah munggah-ngiwa"),
keywords: &[
"arah",
"interkardinal",
"kulon-lor",
"panah",
"panah munggah-ngiwa",
],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი ზემოთ და მარცხნივ"),
keywords: &[
"ინტერკარდინალური",
"ისარი",
"ისარი ზემოთ და მარცხნივ",
"კარდინალური",
"მიმართულება",
"ჩრდილო-დასავლეთი",
],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("aneccab asawen azelmaḍ"),
keywords: &["aneccab asawen azelmaḍ"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("жоғарғы сол жақ көрсеткісі"),
keywords: &[
"бағыт",
"жоғарғы сол жақ көрсеткісі",
"көрсеткі",
"румба аралық",
"солтүстік-батыс",
],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("opadvendt pil mod venstre"),
keywords: &["nordvest", "opadvendt pil mod venstre", "pil", "retning"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some(
"ព\u{17d2}រ\u{17bd}ញទៅលើងាកទៅឆ\u{17d2}វេងក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
),
keywords: &[
"ទ\u{17b7}ស",
"ទ\u{17b7}សដៅ",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញទៅលើងាកទៅឆ\u{17d2}វេងក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಮೇಲ\u{cbf}ನ ಎಡ ಬಾಣ"),
keywords: &[
"ಇಂಟರ\u{ccd}ಕಾರ\u{ccd}ಡ\u{cbf}ನಲ\u{ccd}",
"ನ\u{cbf}ರ\u{ccd}ದೇಶನ",
"ಬಾಣ",
"ಮೇಲ\u{cbf}ನ ಎಡ ಬಾಣ",
"ವಾಯುವ\u{ccd}ಯ",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("좌상향 화살표"),
keywords: &["북서쪽", "좌상향 화살표", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("वयर-दावो बाण"),
keywords: &["उपदिशा", "दिशा", "बाण", "वयर-दावो बाण", "वायव\u{94d}य"],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("өйдө сол жакты караган жебе"),
keywords: &[
"багыт",
"жебе",
"түндүк-батыш",
"өйдө сол жакты караган жебе",
],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("Feil no uewen a lénks"),
keywords: &[
"Feil",
"Feil no uewen a lénks",
"Himmelsrichtung",
"Nordwesten",
"Richtung",
],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນຂ\u{eb6}\u{ec9}ນມ\u{eb8}ມຊ\u{ec9}າຍ"),
keywords: &[
"ຂ\u{eb6}\u{ec9}ນ",
"ທ\u{eb4}ທາງ",
"ມ\u{eb8}ມຊ\u{ec9}າຍ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນຂ\u{eb6}\u{ec9}ນມ\u{eb8}ມຊ\u{ec9}າຍ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė į viršų ir į kairę"),
keywords: &[
"kryptis",
"rodyklė",
"rodyklė į viršų ir į kairę",
"šiaurės vakarai",
],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("uz augšējo kreiso stūri vērsta bultiņa"),
keywords: &[
"bultiņa",
"norāde",
"uz augšējo kreiso stūri vērsta bultiņa",
"ziemeļrietumi",
],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere ake-mauī"),
keywords: &[
"ahunga",
"pere",
"pere ake-mauī",
"uru-mā-raki",
"waenga-matua",
],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("стрелка нагоре лево"),
keywords: &["лево", "северозапад", "стрелка", "стрелка нагоре лево"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some("മ\u{d41}കളിൽ ഇടത\u{d4d}തേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം"),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ഇന\u{d4d}റർക\u{d3e}ർഡിനൽ",
"ദിശ",
"മ\u{d41}കളിൽ ഇടത\u{d4d}തേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
"വടക\u{d4d}ക\u{d4d} പടിഞ\u{d4d}ഞ\u{d3e}റൻ",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("зүүн дээшээ сум"),
keywords: &["баруун хойд", "зүүн дээшээ сум", "сум", "чиглэл"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("वर-डावीकड\u{947} दर\u{94d}शविणारा बाण"),
keywords: &[
"अ\u{902}तर\u{94d}गत महत\u{94d}वाच\u{947}",
"दक\u{94d}षिणपश\u{94d}चिम",
"दिशा",
"बाण",
"वर-डावीकड\u{947} दर\u{94d}शविणारा बाण",
],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah ke penjuru kiri atas"),
keywords: &[
"anak panah",
"anak panah ke penjuru kiri atas",
"antara kardinal",
"arah",
"barat laut",
],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa xellugija ’l fuq"),
keywords: &[
"direzzjoni",
"interkardinal",
"majjistral",
"vleġġa",
"vleġġa xellugija ’l fuq",
],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ဘယ\u{103a}ဘက\u{103a}အပေါ\u{103a}ည\u{103d}\u{103e}န\u{103a} မြား"),
keywords: &[
"ဘယ\u{103a}ဘက\u{103a}အပေါ\u{103a}ည\u{103d}\u{103e}န\u{103a} မြား",
"ဘယ\u{103a}ဘက\u{103a}အပေါ\u{103a}ည\u{103d}\u{103e}န\u{103a}မြား",
"မြား",
"လမ\u{103a}းည\u{103d}\u{103e}န\u{103a}",
"အနောက\u{103a}မြောက\u{103a}",
"အရပ\u{103a}မျက\u{103a}န\u{103e}ာ န\u{103e}စ\u{103a}ခ\u{102f}ကြားပြ သင\u{103a}\u{1039}ကေတ",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("pil opp-venstre"),
keywords: &["nordvest", "pil", "pil opp-venstre", "retning"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("माथि बायातर\u{94d}फको तीर"),
keywords: &[
"इन\u{94d}टरकार\u{94d}डिनल",
"उत\u{94d}तर पश\u{94d}चिम",
"तीर",
"दिशा",
"माथि बायातर\u{94d}फको तीर",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("pijl linksomhoog"),
keywords: &[
"noordwest",
"pijl",
"pijl linksomhoog",
"richting",
"windrichting",
],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("pil opp-venstre"),
keywords: &["nordvest", "pil", "pil opp-venstre", "retning"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ଉପର-ବ\u{b3e}ମ ତୀର"),
keywords: &[
"ଅନ\u{b4d}ତଃପ\u{b4d}ରଧ\u{b3e}ନ",
"ଉତ\u{b4d}ତରପଶ\u{b4d}ଚ\u{b3f}ମ",
"ଉପର-ବ\u{b3e}ମ ତୀର",
"ତୀର",
"ଦ\u{b3f}ଗ",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਉ\u{a71}ਪਰ-ਖ\u{a71}ਬ\u{a47} ਤੀਰ"),
keywords: &[
"ਇ\u{a70}ਟਰਕਾਰੀਡਨਮਲ",
"ਉ\u{a71}ਤਰ-ਪ\u{a71}ਛਮ",
"ਉ\u{a71}ਪਰ-ਖ\u{a71}ਬ\u{a47} ਤੀਰ",
"ਤੀਰ",
"ਦਿਸ\u{a3c}ਾ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("ا\u{64f}وتلا-کھبا تیر"),
keywords: &[
"انٹر کارڈینل",
"ا\u{64f}وتلا-کھبا تیر",
"تیر",
"سمت",
"شمال مغرب",
],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Áro De Pọínt Ọ\u{301}p-Lẹft"),
keywords: &[
"Dairẹ\u{301}kshọn",
"Kádínal",
"Nọ\u{301}twẹst",
"Áro",
"Áro De Pọínt Ọ\u{301}p-Lẹft",
],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka w górę w lewo"),
keywords: &[
"kierunek",
"północny zachód",
"strzałka",
"strzałka do góry w lewo",
"strzałka w górę w lewo",
],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("پورته چپ غشی"),
keywords: &["انټر کارډينل", "اړخ", "سويلي ختيځ", "غشی", "پورته چپ غشی"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta para cima e para a esquerda"),
keywords: &[
"direção",
"intercardinal",
"noroeste",
"seta",
"seta para cima e para a esquerda",
],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("seta para cima e para a esquerda"),
keywords: &[
"direção",
"intermédio",
"noroeste",
"seta",
"seta para cima e para a esquerda",
],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("hanaq lluq’i wach’i"),
keywords: &["hanaq lluq’i wach’i"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată orientată în stânga-sus"),
keywords: &[
"direcție",
"intercardinal",
"nord-vest",
"săgeată",
"săgeată orientată în stânga-sus",
],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-008"),
keywords: &["E10-008"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка влево-вверх"),
keywords: &["влево-вверх", "направление", "северо-запад", "стрелка"],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("umwambi wo hejuru ugana ibumoso"),
keywords: &[
"amajyaruguru ashyira iburengerazuba",
"icyerekezo",
"kiri hagati y’ibyerekezo bine",
"umwambi",
"umwambi wo hejuru ugana ibumoso",
],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("مٿ کاٻي طرف تير"),
keywords: &["اتر اولھ", "انٽرڪارڊينل", "تير", "طرف", "مٿ کاٻي طرف تير"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("උඩ\u{dd4}-වම\u{dca} ඊතලය"),
keywords: &[
"ඉන\u{dca}ටර\u{dca}ක\u{dcf}ඩ\u{dd2}නල\u{dca}",
"ඊතලය",
"උඩ\u{dd4}-වම\u{dca} ඊතලය",
"ද\u{dd2}ශ\u{dcf}ව",
"වයඹ",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka doľava nahor"),
keywords: &["doľava nahor", "severozápad", "šípka", "šípka doľava nahor"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica levo gor"),
keywords: &[
"puščica",
"puščica levo gor",
"severozahodno",
"smer",
"stransko",
],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("fallaarta kor ee bidix"),
keywords: &[
"fallaar",
"fallaarta kor ee bidix",
"isdhaafsiga jihooyinka",
"jihada",
"waqooyi galbeed",
],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjeta lart-majtas"),
keywords: &[
"drejtim",
"i ndërmjetëm",
"shigjeta lart-majtas",
"shigjetë",
"veriperëndim",
],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица нагоре и налево"),
keywords: &[
"сeвeрoзaпaд",
"смер",
"стрeлицa",
"стрелица нагоре и налево",
],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("стрелица нагоре и налијево"),
keywords: &["стрелица нагоре и налијево"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica nagore i nalevo"),
keywords: &[
"severozapad",
"smer",
"strelica",
"strelica nagore i nalevo",
],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: Some("strelica nagore i nalijevo"),
keywords: &["strelica nagore i nalijevo"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("uppåtpil vänster"),
keywords: &["pil", "uppåt", "uppåtpil vänster", "vänster"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale unaoelekeza juu kushoto"),
keywords: &[
"kaskazini magharibi",
"kati ya sehemu kuu ya dira",
"mshale",
"mshale unaoelekeza juu kushoto",
"uelekeo",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("மேல\u{bcd}-இடது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
keywords: &[
"திசை",
"மேல\u{bcd}-இடது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
"வடமேற\u{bcd}கு",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("ఎగువ ఎడమ బ\u{c3e}ణం"),
keywords: &[
"ఎగువ ఎడమ బ\u{c3e}ణం",
"ద\u{c3f}శ",
"బ\u{c3e}ణం",
"వ\u{c3e}యువ\u{c4d}యం",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири болову чап"),
keywords: &["мобайнӣ", "самт", "тир", "тири болову чап", "шимолу ғарб"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ม\u{e38}มซ\u{e49}ายบน"),
keywords: &[
"ตะว\u{e31}นตกเฉ\u{e35}ยงเหน\u{e37}อ",
"ท\u{e34}ศทาง",
"ล\u{e39}กศร",
"ล\u{e39}กศรช\u{e35}\u{e49}ม\u{e38}มซ\u{e49}ายบน",
],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ምልክት ናብ-ጸጋማይ ላዕሊ"),
keywords: &[
"ምልክት",
"ምልክት ናብ-ጸጋማይ ላዕሊ",
"ሰሜናዊ ምዕራብ",
"ኣንፈት",
"ውሽጣዊ ናይ ካርዲናል",
],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("ýokary-çepe ok"),
keywords: &[
"demirgazyk-günbatar",
"gytak",
"ok",
"ugur",
"ýokary-çepe ok",
],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau ki ʻolunga toʻohema"),
keywords: &["hema", "ngahau", "ngahau ki ʻolunga toʻohema", "ʻolunga"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("sol yukarı ok"),
keywords: &["ara yön", "kuzey batı", "ok", "sol yukarı ok", "yön"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("ئۈستى سول كۆرسەتكۈچ"),
keywords: &[
"ئارا يۆنىلىشلىك",
"ئۈستى سول كۆرسەتكۈچ",
"غەربىي-شىمال",
"كۆرسەتكۈچ",
"يۆنىلىش",
],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка вгору вліво"),
keywords: &[
"напрям",
"проміжний",
"північно-західний",
"стрілка вгору вліво",
"стрілка вгору та ліворуч",
],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("اوپر بائیں تیر"),
keywords: &["اوپر بائیں تیر", "تیر", "سائن", "سمت", "شمال مغرب"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("chap yuqoriga strelka"),
keywords: &[
"chap yuqoriga strelka",
"chiziq",
"shimoli-g‘arb",
"yo‘nalish",
],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên lên bên trái"),
keywords: &[
"hướng",
"mũi tên",
"mũi tên lên bên trái",
"nhiều hướng",
"tây bắc",
],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fettu càmmoñu-kaw"),
keywords: &[
"direction",
"fett",
"fettu càmmoñu-kaw",
"interkàrdinal",
"sowwu-gànnaar",
],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo olusekhohlo phezulu"),
keywords: &[
"entshona mntla",
"ukhardinale ohlangeneyo",
"ulwalathiso",
"utolo",
"utolo olusekhohlo phezulu",
],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("àmì òkè ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá òsì"),
keywords: &[
"iwọ\u{300} oòrùn àríwá",
"àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
"àmì òkè ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá òsì",
"ìlọ\u{301}pọ\u{300} kádínàlì",
"ìtọ\u{301}sọ\u{301}nà",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("向左上箭咀"),
keywords: &["向左上箭咀", "基點間", "方向", "箭咀", "西北"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("向左上箭咀"),
keywords: &["向左上箭咀", "基点间", "方向", "箭咀", "西北"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("左上箭头"),
keywords: &["左上箭头", "方向", "标识", "西北"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("左上箭頭"),
keywords: &["左上箭頭", "方向"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("上左箭嘴"),
keywords: &["上左箭嘴", "方向"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("umcibisholo obheke phezulu kwesokunxele"),
keywords: &[
"inkomba",
"inkombakuhlanganisa",
"inyakathontshonalanga",
"umcibisholo",
"umcibisholo obheke phezulu kwesokunxele",
],
},
],
};
// Emoji metadata for "up-down arrow" (U+2195 U+FE0F).
// This table appears to be machine-generated from Unicode CLDR annotation
// data (one feature-gated `Annotation` per CLDR locale) — avoid hand-editing
// individual entries; regenerate from the upstream data instead.
#[doc = "↕\u{fe0f}"]
pub const UP_DOWN_ARROW: crate::Emoji = crate::Emoji {
    glyph: "↕\u{fe0f}",
    // Fully-qualified form: U+2195 followed by the VS16 presentation selector.
    codepoint: "2195 FE0F",
    status: crate::Status::FullyQualified,
    introduction_version: 0.6f32,
    name: "up-down arrow",
    group: "Symbols",
    subgroup: "arrow",
    is_variant: false,
    // Unqualified variant: the bare U+2195 without VS16. It carries no
    // annotations of its own; lookups fall back to this parent entry.
    variants: &[crate::Emoji {
        glyph: "↕",
        codepoint: "2195",
        status: crate::Status::Unqualified,
        introduction_version: 0.6f32,
        name: "up-down arrow",
        group: "Symbols",
        subgroup: "arrow",
        is_variant: true,
        variants: &[],
        annotations: &[],
    }],
    // Per-locale text-to-speech name (`tts`) and search keywords. Each entry
    // is compiled in only when the corresponding crate feature is enabled.
    annotations: &[
        #[cfg(feature = "af")]
        crate::Annotation {
            lang: "af",
            tts: Some("pyl op en af"),
            keywords: &["pyl op en af", "pyle"],
        },
        #[cfg(feature = "am")]
        crate::Annotation {
            lang: "am",
            tts: Some("ወደ ላይ ታች ጠቋሚ ቀስት"),
            keywords: &["ቀስት", "ወደ ላይ ታች ጠቋሚ ቀስት"],
        },
        #[cfg(feature = "ar")]
        crate::Annotation {
            lang: "ar",
            tts: Some("سهم لأعلى وأسفل"),
            keywords: &["سهم", "سهم لأعلى وأسفل"],
        },
        #[cfg(feature = "as")]
        crate::Annotation {
            lang: "as",
            tts: Some("ঊৰ\u{9cd}ধ\u{9cd}বগ\u{9be}মী-অধোগ\u{9be}মী ক\u{9be}\u{981}ড\u{9bc}"),
            keywords: &[
                "ঊৰ\u{9cd}ধ\u{9cd}বগ\u{9be}মী-অধোগ\u{9be}মী ক\u{9be}\u{981}ড\u{9bc}",
                "ক\u{9be}\u{981}ড\u{9bc}",
            ],
        },
        #[cfg(feature = "az")]
        crate::Annotation {
            lang: "az",
            tts: Some("üzü aşağı-yuxarı ox"),
            keywords: &["ox", "üzü aşağı-yuxarı ox"],
        },
        #[cfg(feature = "be")]
        crate::Annotation {
            lang: "be",
            tts: Some("стрэлка ўверх-уніз"),
            keywords: &["падвоеная", "стрэлка", "стрэлка ўверх-уніз"],
        },
        #[cfg(feature = "bg")]
        crate::Annotation {
            lang: "bg",
            tts: Some("Стрелка нагоре и надолу"),
            keywords: &[
                "Стрелка нагоре и надолу",
                "стрелка",
                "стрелка нагоре и надолу",
            ],
        },
        #[cfg(feature = "bn")]
        crate::Annotation {
            lang: "bn",
            tts: Some("উপরে নীচে তীর"),
            keywords: &["উপরে নীচে তীর", "তীর"],
        },
        #[cfg(feature = "bs")]
        crate::Annotation {
            lang: "bs",
            tts: Some("strelica gore-dolje"),
            keywords: &["strelica", "strelica gore-dolje"],
        },
        #[cfg(feature = "ca")]
        crate::Annotation {
            lang: "ca",
            tts: Some("fletxa cap amunt i cap avall"),
            keywords: &["amunt", "avall", "fletxa", "fletxa cap amunt i cap avall"],
        },
        #[cfg(feature = "chr")]
        crate::Annotation {
            lang: "chr",
            tts: Some("ᎦᎸᎳᏗ - ᎡᎳᏗ ᎦᏝᏗ"),
            keywords: &["ᎦᎸᎳᏗ - ᎡᎳᏗ ᎦᏝᏗ", "ᎦᏝᏗ"],
        },
        #[cfg(feature = "cs")]
        crate::Annotation {
            lang: "cs",
            tts: Some("šipka nahoru a dolů"),
            keywords: &["šipka", "šipka nahoru a dolů"],
        },
        #[cfg(feature = "cy")]
        crate::Annotation {
            lang: "cy",
            tts: Some("saeth i fyny-lawr"),
            keywords: &["saeth", "saeth i fyny-lawr"],
        },
        #[cfg(feature = "da")]
        crate::Annotation {
            lang: "da",
            tts: Some("op- og nedadvendt pil"),
            keywords: &["op- og nedadvendt pil", "pil"],
        },
        #[cfg(feature = "de")]
        crate::Annotation {
            lang: "de",
            tts: Some("Pfeil nach oben und unten"),
            keywords: &[
                "Pfeil",
                "Pfeil nach oben und unten",
                "entgegengesetzt",
                "nach oben und unten",
            ],
        },
        #[cfg(feature = "el")]
        crate::Annotation {
            lang: "el",
            tts: Some("πάνω κάτω βέλος"),
            keywords: &["βέλος", "πάνω κάτω βέλος"],
        },
        #[cfg(feature = "en")]
        crate::Annotation {
            lang: "en",
            tts: Some("up-down arrow"),
            keywords: &["arrow", "up-down arrow"],
        },
        // NOTE(review): the "↑↑↑" tts/keyword values below look like CLDR
        // placeholder markers for locales that inherit or lack real data —
        // confirm against the generator's CLDR source before relying on them.
        #[cfg(feature = "en_AU")]
        crate::Annotation {
            lang: "en_AU",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_CA")]
        crate::Annotation {
            lang: "en_CA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_GB")]
        crate::Annotation {
            lang: "en_GB",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_IN")]
        crate::Annotation {
            lang: "en_IN",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es")]
        crate::Annotation {
            lang: "es",
            tts: Some("flecha arriba y abajo"),
            keywords: &[
                "abajo",
                "arriba",
                "dirección",
                "flecha",
                "flecha arriba y abajo",
            ],
        },
        #[cfg(feature = "es_419")]
        crate::Annotation {
            lang: "es_419",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_MX")]
        crate::Annotation {
            lang: "es_MX",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_US")]
        crate::Annotation {
            lang: "es_US",
            tts: Some("↑↑↑"),
            keywords: &["flecha", "flecha arriba y abajo"],
        },
        #[cfg(feature = "et")]
        crate::Annotation {
            lang: "et",
            tts: Some("nool üles-alla"),
            keywords: &["nool", "nool üles-alla"],
        },
        #[cfg(feature = "eu")]
        crate::Annotation {
            lang: "eu",
            tts: Some("gora eta behera gezia"),
            keywords: &["gezi", "gora eta behera gezia"],
        },
        #[cfg(feature = "fa")]
        crate::Annotation {
            lang: "fa",
            tts: Some("پیکان بالا و پایین"),
            keywords: &["پیکان", "پیکان بالا و پایین"],
        },
        #[cfg(feature = "fi")]
        crate::Annotation {
            lang: "fi",
            tts: Some("nuoli ylös ja alas"),
            keywords: &["nuoli", "nuoli ylös ja alas", "suunta"],
        },
        #[cfg(feature = "fil")]
        crate::Annotation {
            lang: "fil",
            tts: Some("pataas-pababang arrow"),
            keywords: &["arrow", "pababa", "pataas", "pataas-pababang arrow"],
        },
        #[cfg(feature = "fo")]
        crate::Annotation {
            lang: "fo",
            tts: Some("pílur sum peikar upp og niður"),
            keywords: &["pílur", "pílur sum peikar upp og niður"],
        },
        #[cfg(feature = "fr")]
        crate::Annotation {
            lang: "fr",
            tts: Some("flèche haut bas"),
            keywords: &["flèche", "flèche haut bas"],
        },
        #[cfg(feature = "fr_CA")]
        crate::Annotation {
            lang: "fr_CA",
            tts: Some("flèche pointant vers le haut et vers le bas"),
            keywords: &[
                "bas haut",
                "en bas et en haut",
                "en haut et en bas",
                "flèche",
                "flèche pointant vers le haut et vers le bas",
                "haut bas",
            ],
        },
        #[cfg(feature = "ga")]
        crate::Annotation {
            lang: "ga",
            tts: Some("saighead suas-síos"),
            keywords: &["saighead", "saighead suas-síos"],
        },
        #[cfg(feature = "gd")]
        crate::Annotation {
            lang: "gd",
            tts: Some("saighead suas is sìos"),
            keywords: &["saighead", "saighead suas is sìos"],
        },
        #[cfg(feature = "gl")]
        crate::Annotation {
            lang: "gl",
            tts: Some("frecha cara arriba e abaixo"),
            keywords: &[
                "abaixo",
                "arriba",
                "dirección",
                "frecha",
                "frecha cara arriba e abaixo",
            ],
        },
        #[cfg(feature = "gu")]
        crate::Annotation {
            lang: "gu",
            tts: Some("ઉપર-નીચ\u{ac7} તીર"),
            keywords: &["ઉપર-નીચ\u{ac7} તીર", "તીર"],
        },
        #[cfg(feature = "ha")]
        crate::Annotation {
            lang: "ha",
            tts: Some("kibiyar ƙasa ta sama"),
            keywords: &["kibiya", "kibiyar ƙasa ta sama"],
        },
        #[cfg(feature = "he")]
        crate::Annotation {
            lang: "he",
            tts: Some("חץ מעלה ומטה"),
            keywords: &["חץ", "חץ מעלה ומטה", "למטה", "למעלה"],
        },
        #[cfg(feature = "hi")]
        crate::Annotation {
            lang: "hi",
            tts: Some("ऊपर-नीच\u{947} तीर"),
            keywords: &["ऊपर-नीच\u{947} तीर", "तीर"],
        },
        #[cfg(feature = "hr")]
        crate::Annotation {
            lang: "hr",
            tts: Some("strelica gore-dolje"),
            keywords: &["strelica", "strelica gore-dolje"],
        },
        #[cfg(feature = "hu")]
        crate::Annotation {
            lang: "hu",
            tts: Some("fel-le mutató nyíl"),
            keywords: &["fel-le mutató nyíl", "nyíl"],
        },
        #[cfg(feature = "hy")]
        crate::Annotation {
            lang: "hy",
            tts: Some("վերև-ներքև սլաք"),
            keywords: &["սլաք", "վերև-ներքև սլաք"],
        },
        #[cfg(feature = "id")]
        crate::Annotation {
            lang: "id",
            tts: Some("tanda panah atas bawah"),
            keywords: &["panah", "tanda panah atas bawah"],
        },
        #[cfg(feature = "ig")]
        crate::Annotation {
            lang: "ig",
            tts: Some("ube-dị ala dị elu"),
            keywords: &["ube", "ube-dị ala dị elu"],
        },
        #[cfg(feature = "is")]
        crate::Annotation {
            lang: "is",
            tts: Some("ör upp og niður"),
            keywords: &["ör", "ör upp og niður"],
        },
        #[cfg(feature = "it")]
        crate::Annotation {
            lang: "it",
            tts: Some("freccia su-giù"),
            keywords: &["altezza", "doppia direzione", "freccia", "freccia su-giù"],
        },
        #[cfg(feature = "ja")]
        crate::Annotation {
            lang: "ja",
            tts: Some("上下矢印"),
            keywords: &["上下", "上下矢印", "矢印"],
        },
        #[cfg(feature = "jv")]
        crate::Annotation {
            lang: "jv",
            tts: Some("panah munggah-mudhun"),
            keywords: &["panah", "panah munggah-mudhun"],
        },
        #[cfg(feature = "ka")]
        crate::Annotation {
            lang: "ka",
            tts: Some("ისარი ზემოთ და ქვემოთ"),
            keywords: &["ისარი", "ისარი ზემოთ და ქვემოთ"],
        },
        #[cfg(feature = "kab")]
        crate::Annotation {
            lang: "kab",
            tts: Some("aneccab asawen akesser"),
            keywords: &["aneccab asawen akesser"],
        },
        #[cfg(feature = "kk")]
        crate::Annotation {
            lang: "kk",
            tts: Some("жоғары-төмен көрсеткісі"),
            keywords: &["жоғары-төмен көрсеткісі", "көрсеткі"],
        },
        #[cfg(feature = "kl")]
        crate::Annotation {
            lang: "kl",
            tts: Some("op- og nedadvendt pil"),
            keywords: &["op- og nedadvendt pil", "pil"],
        },
        #[cfg(feature = "km")]
        crate::Annotation {
            lang: "km",
            tts: Some(
                "ព\u{17d2}រ\u{17bd}ញទៅលើទៅក\u{17d2}រោមក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ),
            keywords: &[
                "ទ\u{17b7}ស",
                "ទ\u{17b7}សដៅ",
                "ព\u{17d2}រ\u{17bd}ញ",
                "ព\u{17d2}រ\u{17bd}ញទៅលើទៅក\u{17d2}រោមក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ],
        },
        #[cfg(feature = "kn")]
        crate::Annotation {
            lang: "kn",
            tts: Some("ಮೇಲ\u{cbf}ನ ಕ\u{cc6}ಳ ಬಾಣ"),
            keywords: &["ಬಾಣ", "ಬಾಣದ ಗುರುತು", "ಮೇಲ\u{cbf}ನ ಕ\u{cc6}ಳ ಬಾಣ"],
        },
        #[cfg(feature = "ko")]
        crate::Annotation {
            lang: "ko",
            tts: Some("상하향 화살표"),
            keywords: &["상하향 화살표", "위아래", "화살표"],
        },
        #[cfg(feature = "kok")]
        crate::Annotation {
            lang: "kok",
            tts: Some("वयर-सकयल बाण"),
            keywords: &["बाण", "वयर-सकयल बाण"],
        },
        #[cfg(feature = "ky")]
        crate::Annotation {
            lang: "ky",
            tts: Some("өйдө-төмөн караган жебе"),
            keywords: &["жебе", "өйдө-төмөн караган жебе"],
        },
        #[cfg(feature = "lb")]
        crate::Annotation {
            lang: "lb",
            tts: Some("Feil no uewen an ënnen"),
            keywords: &["Feil no uewen an ënnen", "Feiler"],
        },
        #[cfg(feature = "lo")]
        crate::Annotation {
            lang: "lo",
            tts: Some("ລ\u{eb9}ກສອນຂ\u{eb6}\u{ec9}ນແລະລ\u{ebb}ງ"),
            keywords: &[
                "ຂ\u{eb6}\u{ec9}ນ",
                "ລ\u{eb9}ກສອນ",
                "ລ\u{eb9}ກສອນຂ\u{eb6}\u{ec9}ນແລະລ\u{ebb}ງ",
                "ລ\u{ebb}ງ",
            ],
        },
        #[cfg(feature = "lt")]
        crate::Annotation {
            lang: "lt",
            tts: Some("rodyklė į viršų ir į apačią"),
            keywords: &["rodyklė", "rodyklė į viršų ir į apačią"],
        },
        #[cfg(feature = "lv")]
        crate::Annotation {
            lang: "lv",
            tts: Some("augšup un lejup vērsta bultiņa"),
            keywords: &["augšup un lejup vērsta bultiņa", "bultiņa"],
        },
        #[cfg(feature = "mi")]
        crate::Annotation {
            lang: "mi",
            tts: Some("pere ake-iho"),
            keywords: &["pere", "pere ake-iho"],
        },
        #[cfg(feature = "mk")]
        crate::Annotation {
            lang: "mk",
            tts: Some("стрелка нагоре-надолу"),
            keywords: &["горе", "долу", "стрелка", "стрелка нагоре-надолу"],
        },
        #[cfg(feature = "ml")]
        crate::Annotation {
            lang: "ml",
            tts: Some(
                "മ\u{d41}കളിലേക\u{d4d}ക\u{d41}ം ത\u{d3e}ഴേക\u{d4d}ക\u{d41}മ\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
            ),
            keywords: &[
                "അമ\u{d4d}പടയ\u{d3e}ളം",
                "ത\u{d3e}ഴേക\u{d4d}ക\u{d4d}",
                "മ\u{d41}കളിലേക\u{d4d}ക\u{d41}ം ത\u{d3e}ഴേക\u{d4d}ക\u{d41}മ\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
                "മ\u{d41}കളിലേക\u{d4d}ക\u{d4d}",
            ],
        },
        #[cfg(feature = "mn")]
        crate::Annotation {
            lang: "mn",
            tts: Some("дээш доошоо сум"),
            keywords: &["баруун доошоо сум", "дээш доошоо сум", "сум"],
        },
        #[cfg(feature = "mr")]
        crate::Annotation {
            lang: "mr",
            tts: Some("वर-खाली दर\u{94d}शविणारा बाण"),
            keywords: &["बाण", "वर-खाली दर\u{94d}शविणारा बाण"],
        },
        #[cfg(feature = "ms")]
        crate::Annotation {
            lang: "ms",
            tts: Some("anak panah ke atas ke bawah"),
            keywords: &["anak panah", "anak panah ke atas ke bawah"],
        },
        #[cfg(feature = "mt")]
        crate::Annotation {
            lang: "mt",
            tts: Some("vleġġa ’l isfel u ’l isfel"),
            keywords: &["vleġġa", "vleġġa ’l isfel u ’l isfel"],
        },
        #[cfg(feature = "my")]
        crate::Annotation {
            lang: "my",
            tts: Some("အပေါ\u{103a}−အောက\u{103a}ပြ မြား"),
            keywords: &[
                "မြား",
                "အပေါ\u{103a} အောက\u{103a} န\u{103e}စ\u{103a}ဘက\u{103a}ပြ မြား သင\u{103a}\u{1039}ကေတ",
                "အပေါ\u{103a}−အောက\u{103a}ပြ မြား",
            ],
        },
        #[cfg(feature = "nb")]
        crate::Annotation {
            lang: "nb",
            tts: Some("pil opp og ned"),
            keywords: &["pil", "pil opp og ned"],
        },
        #[cfg(feature = "ne")]
        crate::Annotation {
            lang: "ne",
            tts: Some("माथि-तल वाण"),
            keywords: &["माथि-तल वाण", "वाण"],
        },
        #[cfg(feature = "nl")]
        crate::Annotation {
            lang: "nl",
            tts: Some("pijl omhoog en omlaag"),
            keywords: &["pijl", "pijl omhoog en omlaag"],
        },
        #[cfg(feature = "nn")]
        crate::Annotation {
            lang: "nn",
            tts: Some("pil opp og ned"),
            keywords: &["pil", "pil opp og ned"],
        },
        #[cfg(feature = "or")]
        crate::Annotation {
            lang: "or",
            tts: Some("ଉପର-ତଳ ତୀର"),
            keywords: &["ଉପର-ତଳ ତୀର", "ତୀର"],
        },
        #[cfg(feature = "pa")]
        crate::Annotation {
            lang: "pa",
            tts: Some("ਉ\u{a71}ਪਰ-ਥ\u{a71}ਲ\u{a47} ਤੀਰ"),
            keywords: &["ਉ\u{a71}ਪਰ-ਥ\u{a71}ਲ\u{a47} ਤੀਰ", "ਤੀਰ"],
        },
        #[cfg(feature = "pa_Arab")]
        crate::Annotation {
            lang: "pa_Arab",
            tts: Some("ا\u{64f}وتلا-ہیٹھلا تیر"),
            keywords: &["ا\u{64f}وتلا-ہیٹھلا تیر", "تیر"],
        },
        #[cfg(feature = "pcm")]
        crate::Annotation {
            lang: "pcm",
            tts: Some("Áro De Pọínt Ọ\u{301}p An Daun"),
            keywords: &["Áro", "Áro De Pọínt Ọ\u{301}p An Daun"],
        },
        #[cfg(feature = "pl")]
        crate::Annotation {
            lang: "pl",
            tts: Some("strzałka w górę i w dół"),
            keywords: &[
                "dwukierunkowa",
                "góra-dół",
                "strzałka do góry i na dół",
                "strzałka w górę i w dół",
            ],
        },
        #[cfg(feature = "ps")]
        crate::Annotation {
            lang: "ps",
            tts: Some("پورته ښکته غشی"),
            keywords: &["غشی", "پورته ښکته غشی"],
        },
        #[cfg(feature = "pt")]
        crate::Annotation {
            lang: "pt",
            tts: Some("seta para cima e para baixo"),
            keywords: &[
                "para baixo",
                "para cima",
                "seta",
                "seta para cima e para baixo",
                "vertical",
            ],
        },
        #[cfg(feature = "pt_PT")]
        crate::Annotation {
            lang: "pt_PT",
            tts: Some("↑↑↑"),
            keywords: &[
                "para baixo",
                "para cima",
                "seta",
                "seta para cima e para baixo",
            ],
        },
        #[cfg(feature = "qu")]
        crate::Annotation {
            lang: "qu",
            tts: Some("hanaq uran wach’i"),
            keywords: &["hanaq uran wach’i"],
        },
        #[cfg(feature = "ro")]
        crate::Annotation {
            lang: "ro",
            tts: Some("săgeată sus-jos"),
            keywords: &["săgeată", "săgeată sus-jos"],
        },
        // "root" carries the CLDR internal identifier rather than a
        // human-readable name.
        #[cfg(feature = "root")]
        crate::Annotation {
            lang: "root",
            tts: Some("E10-007"),
            keywords: &["E10-007"],
        },
        #[cfg(feature = "ru")]
        crate::Annotation {
            lang: "ru",
            tts: Some("стрелка вверх-вниз"),
            keywords: &[
                "вверх и вниз",
                "вверх-вниз",
                "двойная",
                "стрелка",
                "стрелка вверх вниз",
            ],
        },
        #[cfg(feature = "rw")]
        crate::Annotation {
            lang: "rw",
            tts: Some("akambi kazamuka-kamanuka"),
            keywords: &["akambi kazamuka-kamanuka", "umwambi"],
        },
        #[cfg(feature = "sd")]
        crate::Annotation {
            lang: "sd",
            tts: Some("مٿي هيٺ طرف تير"),
            keywords: &["تير", "مٿي هيٺ طرف تير"],
        },
        #[cfg(feature = "si")]
        crate::Annotation {
            lang: "si",
            tts: Some("උඩ\u{dd4}-යට\u{dd2} ඊතලය"),
            keywords: &["ඊතලය", "උඩ\u{dd4}-යට\u{dd2} ඊතලය"],
        },
        #[cfg(feature = "sk")]
        crate::Annotation {
            lang: "sk",
            tts: Some("šípka nahor aj nadol"),
            keywords: &["šípka", "šípka nahor aj nadol"],
        },
        #[cfg(feature = "sl")]
        crate::Annotation {
            lang: "sl",
            tts: Some("puščica gor dol"),
            keywords: &["puščica", "puščica gor dol"],
        },
        #[cfg(feature = "so")]
        crate::Annotation {
            lang: "so",
            tts: Some("fallaarta kor-hoose"),
            keywords: &["fallaar", "fallaarta kor-hoose"],
        },
        #[cfg(feature = "sq")]
        crate::Annotation {
            lang: "sq",
            tts: Some("shigjeta lart-poshtë"),
            keywords: &["shigjeta lart-poshtë", "shigjetë"],
        },
        #[cfg(feature = "sr")]
        crate::Annotation {
            lang: "sr",
            tts: Some("стрелица нагоре и надоле"),
            keywords: &["стрeлицa", "стрелица нагоре и надоле"],
        },
        #[cfg(feature = "sr_Cyrl_BA")]
        crate::Annotation {
            lang: "sr_Cyrl_BA",
            tts: Some("стрелица нагоре и надоље"),
            keywords: &["стрелица нагоре и надоље"],
        },
        #[cfg(feature = "sr_Latn")]
        crate::Annotation {
            lang: "sr_Latn",
            tts: Some("strelica nagore i nadole"),
            keywords: &["strelica", "strelica nagore i nadole"],
        },
        #[cfg(feature = "sr_Latn_BA")]
        crate::Annotation {
            lang: "sr_Latn_BA",
            tts: Some("strelica nagore i nadolje"),
            keywords: &["strelica nagore i nadolje"],
        },
        #[cfg(feature = "sv")]
        crate::Annotation {
            lang: "sv",
            tts: Some("pil upp och ned"),
            keywords: &["nedåt", "pil", "pil upp och ned", "uppåt"],
        },
        #[cfg(feature = "sw")]
        crate::Annotation {
            lang: "sw",
            tts: Some("mshale unaoelekeza chini na juu"),
            keywords: &["mshale", "mshale unaoelekeza chini na juu"],
        },
        #[cfg(feature = "sw_KE")]
        crate::Annotation {
            lang: "sw_KE",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "ta")]
        crate::Annotation {
            lang: "ta",
            tts: Some("மேல\u{bcd}-க\u{bc0}ழ\u{bcd} நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
            keywords: &[
                "அம\u{bcd}புக\u{bcd}குறி",
                "மேல\u{bcd}-க\u{bc0}ழ\u{bcd} நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
            ],
        },
        #[cfg(feature = "te")]
        crate::Annotation {
            lang: "te",
            tts: Some("ఎగువ మర\u{c3f}యు ద\u{c3f}గువ బ\u{c3e}ణం"),
            keywords: &[
                "ఎగువ",
                "ఎగువ మర\u{c3f}యు ద\u{c3f}గువ బ\u{c3e}ణం",
                "ద\u{c3f}గువ",
                "బ\u{c3e}ణం",
            ],
        },
        #[cfg(feature = "tg")]
        crate::Annotation {
            lang: "tg",
            tts: Some("тири поёну боло"),
            keywords: &["тир", "тири поёну боло"],
        },
        #[cfg(feature = "th")]
        crate::Annotation {
            lang: "th",
            tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ข\u{e36}\u{e49}นลง"),
            keywords: &[
                "ข\u{e35}\u{e49}น",
                "ลง",
                "ล\u{e39}กศร",
                "ล\u{e39}กศรช\u{e35}\u{e49}ข\u{e36}\u{e49}นลง",
            ],
        },
        #[cfg(feature = "ti")]
        crate::Annotation {
            lang: "ti",
            tts: Some("ምልክት ናብ-ላዕሊ ታሕቲ"),
            keywords: &["ምልክት", "ምልክት ናብ-ላዕሊ ታሕቲ"],
        },
        #[cfg(feature = "tk")]
        crate::Annotation {
            lang: "tk",
            tts: Some("ýokary-aşak ok"),
            keywords: &["ok", "ýokary-aşak ok"],
        },
        #[cfg(feature = "to")]
        crate::Annotation {
            lang: "to",
            tts: Some("ngahau ki ʻolunga mo lalo"),
            keywords: &[
                "hake",
                "hifo",
                "lalo",
                "ngahau",
                "ngahau ki ʻolunga mo lalo",
                "ʻolunga",
            ],
        },
        #[cfg(feature = "tr")]
        crate::Annotation {
            lang: "tr",
            tts: Some("yukarı ve aşağı ok"),
            keywords: &["ok", "yukarı ve aşağı ok"],
        },
        #[cfg(feature = "ug")]
        crate::Annotation {
            lang: "ug",
            tts: Some("ئاستى-ئۈستى كۆرسەتكۈچ"),
            keywords: &["ئاستى-ئۈستى كۆرسەتكۈچ", "كۆرسەتكۈچ"],
        },
        #[cfg(feature = "uk")]
        crate::Annotation {
            lang: "uk",
            tts: Some("стрілка вгору-вниз"),
            keywords: &["стрілка", "стрілка вгору та вниз", "стрілка вгору-вниз"],
        },
        #[cfg(feature = "ur")]
        crate::Annotation {
            lang: "ur",
            tts: Some("اوپر نیچے تیر"),
            keywords: &["اوپر نیچے تیر", "تیر", "سائن", "سمت"],
        },
        #[cfg(feature = "uz")]
        crate::Annotation {
            lang: "uz",
            tts: Some("tepa-pastga strelka"),
            keywords: &["strelka", "tepa-pastga strelka"],
        },
        #[cfg(feature = "vi")]
        crate::Annotation {
            lang: "vi",
            tts: Some("mũi tên lên xuống"),
            keywords: &["mũi tên", "mũi tên lên xuống"],
        },
        #[cfg(feature = "wo")]
        crate::Annotation {
            lang: "wo",
            tts: Some("fettu suufu-kaw"),
            keywords: &["fett", "fettu suufu-kaw"],
        },
        #[cfg(feature = "xh")]
        crate::Annotation {
            lang: "xh",
            tts: Some("utolo olusezantsi phezulu"),
            keywords: &["utolo", "utolo olusezantsi phezulu"],
        },
        #[cfg(feature = "yo")]
        crate::Annotation {
            lang: "yo",
            tts: Some("ọfà tòkètilẹ\u{300}"),
            keywords: &["ọfà", "ọfà tòkètilẹ\u{300}"],
        },
        #[cfg(feature = "yue")]
        crate::Annotation {
            lang: "yue",
            tts: Some("上下箭咀"),
            keywords: &["上下箭咀", "箭咀"],
        },
        #[cfg(feature = "yue_Hans")]
        crate::Annotation {
            lang: "yue_Hans",
            tts: Some("上下箭咀"),
            keywords: &["上下箭咀", "箭咀"],
        },
        #[cfg(feature = "zh")]
        crate::Annotation {
            lang: "zh",
            tts: Some("上下箭头"),
            keywords: &["上下", "上下箭头", "箭头"],
        },
        #[cfg(feature = "zh_Hant")]
        crate::Annotation {
            lang: "zh_Hant",
            tts: Some("上下箭頭"),
            keywords: &["上下箭頭"],
        },
        #[cfg(feature = "zh_Hant_HK")]
        crate::Annotation {
            lang: "zh_Hant_HK",
            tts: Some("上下箭嘴"),
            keywords: &["上下箭嘴"],
        },
        #[cfg(feature = "zu")]
        crate::Annotation {
            lang: "zu",
            tts: Some("umcibisholo ophansi phezulu"),
            keywords: &["umcibisholo", "umcibisholo ophansi phezulu"],
        },
    ],
};
// NOTE(review): Machine-generated Unicode CLDR emoji annotation data.
// Do not hand-edit the string literals below — they mirror a CLDR release;
// corrections belong in the generator or upstream CLDR, not in this file.
#[doc = "↔\u{fe0f}"]
pub const LEFT_RIGHT_ARROW: crate::Emoji = crate::Emoji {
    glyph: "↔\u{fe0f}",
    codepoint: "2194 FE0F",
    status: crate::Status::FullyQualified,
    introduction_version: 0.6f32,
    name: "left-right arrow",
    group: "Symbols",
    subgroup: "arrow",
    is_variant: false,
    // Unqualified variant: the same emoji without the U+FE0F variation selector.
    variants: &[crate::Emoji {
        glyph: "↔",
        codepoint: "2194",
        status: crate::Status::Unqualified,
        introduction_version: 0.6f32,
        name: "left-right arrow",
        group: "Symbols",
        subgroup: "arrow",
        is_variant: true,
        variants: &[],
        annotations: &[],
    }],
    // Per-locale TTS name and search keywords; each entry is compiled in only
    // when the corresponding locale cargo feature is enabled.
    annotations: &[
        #[cfg(feature = "af")]
        crate::Annotation {
            lang: "af",
            tts: Some("pyl links en regs"),
            keywords: &["pyl links en regs", "pyle"],
        },
        #[cfg(feature = "am")]
        crate::Annotation {
            lang: "am",
            tts: Some("ግራ-ቀኝ ጠቋሚ ቀስት"),
            keywords: &["ቀስት", "ግራ-ቀኝ ጠቋሚ ቀስት"],
        },
        #[cfg(feature = "ar")]
        crate::Annotation {
            lang: "ar",
            tts: Some("سهم لليسار واليمين"),
            keywords: &["سهم", "سهم لليسار واليمين"],
        },
        #[cfg(feature = "as")]
        crate::Annotation {
            lang: "as",
            tts: Some("ব\u{9be}ও\u{981}ম\u{9c1}খী-সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}"),
            keywords: &[
                "ক\u{9be}\u{981}ড\u{9bc}",
                "ব\u{9be}ও\u{981}ম\u{9c1}খী-সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
            ],
        },
        #[cfg(feature = "az")]
        crate::Annotation {
            lang: "az",
            tts: Some("sola-sağa ox"),
            keywords: &["ox", "sola-sağa ox"],
        },
        #[cfg(feature = "be")]
        crate::Annotation {
            lang: "be",
            tts: Some("стрэлка ўлева-ўправа"),
            keywords: &["падвоеная", "стрэлка", "стрэлка ўлева-ўправа"],
        },
        #[cfg(feature = "bg")]
        crate::Annotation {
            lang: "bg",
            tts: Some("стрелка наляво и надясно"),
            keywords: &["стрелка", "стрелка наляво и надясно"],
        },
        #[cfg(feature = "bn")]
        crate::Annotation {
            lang: "bn",
            tts: Some("ব\u{9be}মে-ড\u{9be}নে তীর"),
            keywords: &["তীর", "ব\u{9be}মে-ড\u{9be}নে তীর"],
        },
        #[cfg(feature = "bs")]
        crate::Annotation {
            lang: "bs",
            tts: Some("strelica lijevo-desno"),
            keywords: &["strelica", "strelica lijevo-desno"],
        },
        #[cfg(feature = "ca")]
        crate::Annotation {
            lang: "ca",
            tts: Some("fletxa cap a l’esquerra i cap a la dreta"),
            keywords: &[
                "dreta",
                "esquerra",
                "fletxa",
                "fletxa cap a l’esquerra i cap a la dreta",
            ],
        },
        #[cfg(feature = "chr")]
        crate::Annotation {
            lang: "chr",
            tts: Some("ᎠᎦᏍᎦᏂ - ᎦᏘᏏ ᎦᏝᏗ"),
            keywords: &["ᎠᎦᏍᎦᏂ - ᎦᏘᏏ ᎦᏝᏗ", "ᎦᏝᏗ"],
        },
        #[cfg(feature = "cs")]
        crate::Annotation {
            lang: "cs",
            tts: Some("šipka doleva a doprava"),
            keywords: &["šipka", "šipka doleva a doprava"],
        },
        #[cfg(feature = "cy")]
        crate::Annotation {
            lang: "cy",
            tts: Some("saeth chwith-dde"),
            keywords: &["saeth", "saeth chwith-dde"],
        },
        #[cfg(feature = "da")]
        crate::Annotation {
            lang: "da",
            tts: Some("pil mod venstre og højre"),
            keywords: &["pil", "pil mod venstre og højre"],
        },
        #[cfg(feature = "de")]
        crate::Annotation {
            lang: "de",
            tts: Some("Pfeil nach links und rechts"),
            keywords: &[
                "Pfeil",
                "Pfeil nach links und rechts",
                "entgegengesetzt",
                "nach links und rechts",
            ],
        },
        #[cfg(feature = "el")]
        crate::Annotation {
            lang: "el",
            tts: Some("αριστερό δεξιό βέλος"),
            keywords: &["αριστερό δεξιό βέλος", "βέλος"],
        },
        #[cfg(feature = "en")]
        crate::Annotation {
            lang: "en",
            tts: Some("left-right arrow"),
            keywords: &["arrow", "left-right arrow"],
        },
        // NOTE(review): "↑↑↑" entries appear to be placeholders for locales that
        // inherit/lack a distinct annotation — confirm against the CLDR source.
        #[cfg(feature = "en_AU")]
        crate::Annotation {
            lang: "en_AU",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_CA")]
        crate::Annotation {
            lang: "en_CA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_GB")]
        crate::Annotation {
            lang: "en_GB",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_IN")]
        crate::Annotation {
            lang: "en_IN",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es")]
        crate::Annotation {
            lang: "es",
            tts: Some("flecha izquierda y derecha"),
            keywords: &[
                "derecha",
                "dirección",
                "flecha",
                "flecha izquierda y derecha",
                "izquierda",
            ],
        },
        #[cfg(feature = "es_419")]
        crate::Annotation {
            lang: "es_419",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_MX")]
        crate::Annotation {
            lang: "es_MX",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_US")]
        crate::Annotation {
            lang: "es_US",
            tts: Some("↑↑↑"),
            keywords: &["flecha", "flecha izquierda y derecha"],
        },
        #[cfg(feature = "et")]
        crate::Annotation {
            lang: "et",
            tts: Some("nool paremale-vasakule"),
            keywords: &["nool", "nool paremale-vasakule"],
        },
        #[cfg(feature = "eu")]
        crate::Annotation {
            lang: "eu",
            tts: Some("ezkerrera eta eskuinera gezia"),
            keywords: &["ezkerrera eta eskuinera gezia", "gezi"],
        },
        #[cfg(feature = "fa")]
        crate::Annotation {
            lang: "fa",
            tts: Some("پیکان دوطرفه"),
            keywords: &["پیکان", "پیکان دوطرفه"],
        },
        #[cfg(feature = "fi")]
        crate::Annotation {
            lang: "fi",
            tts: Some("nuoli oikealle ja vasemmalle"),
            keywords: &["nuoli", "nuoli oikealle ja vasemmalle", "suunta"],
        },
        #[cfg(feature = "fil")]
        crate::Annotation {
            lang: "fil",
            tts: Some("pakaliwa-pakanang arrow"),
            keywords: &["arrow", "pakaliwa", "pakaliwa-pakanang arrow", "pakanan"],
        },
        #[cfg(feature = "fo")]
        crate::Annotation {
            lang: "fo",
            tts: Some("pílur sum peikar til høgru og vinstu"),
            keywords: &["pílur", "pílur sum peikar til høgru og vinstu"],
        },
        #[cfg(feature = "fr")]
        crate::Annotation {
            lang: "fr",
            tts: Some("flèche gauche droite"),
            keywords: &["flèche", "flèche gauche droite"],
        },
        #[cfg(feature = "fr_CA")]
        crate::Annotation {
            lang: "fr_CA",
            tts: Some("flèche pointant à gauche et à droite"),
            keywords: &[
                "droite gauche",
                "flèche",
                "flèche pointant à gauche et à droite",
                "gauche droite",
                "à droite et à gauche",
                "à gauche et à droite",
            ],
        },
        #[cfg(feature = "ga")]
        crate::Annotation {
            lang: "ga",
            tts: Some("saighead chlé-dheas"),
            keywords: &["saighead", "saighead chlé-dheas"],
        },
        #[cfg(feature = "gd")]
        crate::Annotation {
            lang: "gd",
            tts: Some("saighead chlì is dheas"),
            keywords: &["saighead", "saighead chlì is dheas"],
        },
        #[cfg(feature = "gl")]
        crate::Annotation {
            lang: "gl",
            tts: Some("frecha cara á esquerda e a dereita"),
            keywords: &[
                "dereita",
                "dirección",
                "esquerda",
                "frecha",
                "frecha cara á esquerda e a dereita",
            ],
        },
        #[cfg(feature = "gu")]
        crate::Annotation {
            lang: "gu",
            tts: Some("ડાબ\u{ac1}\u{a82}-જમણ\u{ac1}\u{a82} તીર"),
            keywords: &["ડાબ\u{ac1}\u{a82}-જમણ\u{ac1}\u{a82} તીર", "તીર"],
        },
        #[cfg(feature = "ha")]
        crate::Annotation {
            lang: "ha",
            tts: Some("kibiyar dama ta dama"),
            keywords: &["kibiya", "kibiyar dama ta dama"],
        },
        #[cfg(feature = "he")]
        crate::Annotation {
            lang: "he",
            tts: Some("חץ שמאלה וימינה"),
            keywords: &["חץ", "חץ שמאלה וימינה", "ימינה", "שמאלה"],
        },
        #[cfg(feature = "hi")]
        crate::Annotation {
            lang: "hi",
            tts: Some("बाए\u{901}-दाए\u{901} तीर"),
            keywords: &["तीर", "बाए\u{901}-दाए\u{901} तीर"],
        },
        #[cfg(feature = "hr")]
        crate::Annotation {
            lang: "hr",
            tts: Some("strelica lijevo-desno"),
            keywords: &["strelica", "strelica lijevo-desno"],
        },
        #[cfg(feature = "hu")]
        crate::Annotation {
            lang: "hu",
            tts: Some("balra-jobbra mutató nyíl"),
            keywords: &["balra-jobbra mutató nyíl", "nyíl"],
        },
        #[cfg(feature = "hy")]
        crate::Annotation {
            lang: "hy",
            tts: Some("ձախ-աջ սլաք"),
            keywords: &["աջ-ձախ սլաք", "ձախ-աջ սլաք", "սլաք"],
        },
        #[cfg(feature = "id")]
        crate::Annotation {
            lang: "id",
            tts: Some("tanda panah kiri kanan"),
            keywords: &["panah", "tanda panah kiri kanan"],
        },
        #[cfg(feature = "ig")]
        crate::Annotation {
            lang: "ig",
            tts: Some("ube-akanri akaekpe"),
            keywords: &["ube", "ube-akanri akaekpe"],
        },
        #[cfg(feature = "is")]
        crate::Annotation {
            lang: "is",
            tts: Some("ör til vinstri og hægri"),
            keywords: &["ör", "ör til vinstri og hægri"],
        },
        #[cfg(feature = "it")]
        crate::Annotation {
            lang: "it",
            tts: Some("freccia sinistra-destra"),
            keywords: &[
                "destra",
                "doppia direzione",
                "freccia",
                "freccia sinistra-destra",
                "larghezza",
            ],
        },
        #[cfg(feature = "ja")]
        crate::Annotation {
            lang: "ja",
            tts: Some("左右矢印"),
            keywords: &["左右", "左右矢印", "矢印"],
        },
        #[cfg(feature = "jv")]
        crate::Annotation {
            lang: "jv",
            tts: Some("panah ngiwa-nengen"),
            keywords: &["panah", "panah ngiwa-nengen"],
        },
        #[cfg(feature = "ka")]
        crate::Annotation {
            lang: "ka",
            tts: Some("ისარი მარცხნივ და მარჯვნივ"),
            keywords: &["ისარი", "ისარი მარცხნივ და მარჯვნივ"],
        },
        #[cfg(feature = "kab")]
        crate::Annotation {
            lang: "kab",
            tts: Some("aneccab azelmaḍ ayeffus"),
            keywords: &["aneccab azelmaḍ ayeffus"],
        },
        #[cfg(feature = "kk")]
        crate::Annotation {
            lang: "kk",
            tts: Some("сол-оң көрсеткісі"),
            keywords: &["көрсеткі", "сол-оң көрсеткісі"],
        },
        #[cfg(feature = "kl")]
        crate::Annotation {
            lang: "kl",
            tts: Some("pil mod venstre og højre"),
            keywords: &["pil", "pil mod venstre og højre"],
        },
        #[cfg(feature = "km")]
        crate::Annotation {
            lang: "km",
            tts: Some(
                "ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេងទៅស\u{17d2}តា\u{17c6}ក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ),
            keywords: &[
                "ទ\u{17b7}ស",
                "ទ\u{17b7}សដៅ",
                "ព\u{17d2}រ\u{17bd}ញ",
                "ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេងទៅស\u{17d2}តា\u{17c6}ក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ],
        },
        #[cfg(feature = "kn")]
        crate::Annotation {
            lang: "kn",
            tts: Some("ಎಡ-ಬಲ ಬಾಣ"),
            keywords: &["ಎಡ-ಬಲ ಬಾಣ", "ಎಡ-ಬಲ ಬಾಣದ ಗುರುತು"],
        },
        #[cfg(feature = "ko")]
        crate::Annotation {
            lang: "ko",
            tts: Some("좌우향 화살표"),
            keywords: &["좌우", "좌우향 화살표", "화살표"],
        },
        #[cfg(feature = "kok")]
        crate::Annotation {
            lang: "kok",
            tts: Some("दावो - उजवो बाण"),
            keywords: &["दावो - उजवो बाण", "बाण"],
        },
        #[cfg(feature = "ky")]
        crate::Annotation {
            lang: "ky",
            tts: Some("оңго-солго караган жебе"),
            keywords: &["жебе", "оңго-солго караган жебе"],
        },
        #[cfg(feature = "lb")]
        crate::Annotation {
            lang: "lb",
            tts: Some("Feil no lénks a riets"),
            keywords: &["Feil no lénks a riets", "Feiler"],
        },
        #[cfg(feature = "lo")]
        crate::Annotation {
            lang: "lo",
            tts: Some("ລ\u{eb9}ກສອນຊ\u{ec9}າຍຂວາ"),
            keywords: &[
                "ຂວາ",
                "ຊ\u{ec9}າຍ",
                "ລ\u{eb9}ກສອນ",
                "ລ\u{eb9}ກສອນຊ\u{ec9}າຍຂວາ",
            ],
        },
        #[cfg(feature = "lt")]
        crate::Annotation {
            lang: "lt",
            tts: Some("rodyklė į kairę ir į dešinę"),
            keywords: &["rodyklė", "rodyklė į kairę ir į dešinę"],
        },
        #[cfg(feature = "lv")]
        crate::Annotation {
            lang: "lv",
            tts: Some("pa kreisi un pa labi vērsta bultiņa"),
            keywords: &["bultiņa", "pa kreisi un pa labi vērsta bultiņa"],
        },
        #[cfg(feature = "mi")]
        crate::Annotation {
            lang: "mi",
            tts: Some("pere mauī-matau"),
            keywords: &["pere", "pere mauī-matau"],
        },
        #[cfg(feature = "mk")]
        crate::Annotation {
            lang: "mk",
            tts: Some("стрелка налево-надесно"),
            keywords: &["десно", "лево", "стрелка", "стрелка налево-надесно"],
        },
        #[cfg(feature = "ml")]
        crate::Annotation {
            lang: "ml",
            tts: Some(
                "ഇടത\u{d4d}തോട\u{d4d}ട\u{d41}ം വലത\u{d4d}തോട\u{d4d}ട\u{d41}മ\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
            ),
            keywords: &[
                "അമ\u{d4d}പടയ\u{d3e}ളം",
                "ഇടത\u{d4d}",
                "ഇടത\u{d4d}തോട\u{d4d}ട\u{d41}ം വലത\u{d4d}തോട\u{d4d}ട\u{d41}മ\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
                "വലത\u{d4d}",
            ],
        },
        #[cfg(feature = "mn")]
        crate::Annotation {
            lang: "mn",
            tts: Some("зүүн-баруун сум"),
            keywords: &["зүүн-баруун сум", "сум"],
        },
        #[cfg(feature = "mr")]
        crate::Annotation {
            lang: "mr",
            tts: Some("डावा-उजवा दर\u{94d}शविणारा बाण"),
            keywords: &["डावा-उजवा दर\u{94d}शविणारा बाण", "बाण"],
        },
        #[cfg(feature = "ms")]
        crate::Annotation {
            lang: "ms",
            tts: Some("anak panah ke kiri ke kanan"),
            keywords: &["anak panah", "anak panah ke kiri ke kanan"],
        },
        #[cfg(feature = "mt")]
        crate::Annotation {
            lang: "mt",
            tts: Some("vleġġa ’l isfel xellugija"),
            keywords: &["vleġġa", "vleġġa ’l isfel xellugija"],
        },
        #[cfg(feature = "my")]
        crate::Annotation {
            lang: "my",
            tts: Some("ဘယ\u{103a}−ညာပြ မြား"),
            keywords: &["ဘယ\u{103a}−ညာ မြား", "ဘယ\u{103a}−ညာပြ မြား", "မြား"],
        },
        #[cfg(feature = "nb")]
        crate::Annotation {
            lang: "nb",
            tts: Some("pil venstre-høyre"),
            keywords: &["pil", "pil venstre-høyre"],
        },
        #[cfg(feature = "ne")]
        crate::Annotation {
            lang: "ne",
            tts: Some("बाया\u{901}-दाया\u{901} वाण"),
            keywords: &["बाया\u{901}-दाया\u{901} वाण", "वाण"],
        },
        #[cfg(feature = "nl")]
        crate::Annotation {
            lang: "nl",
            tts: Some("pijl naar links en rechts"),
            keywords: &["pijl", "pijl naar links en rechts"],
        },
        #[cfg(feature = "nn")]
        crate::Annotation {
            lang: "nn",
            tts: Some("pil venstre-høgre"),
            keywords: &["pil", "pil venstre-høgre"],
        },
        #[cfg(feature = "or")]
        crate::Annotation {
            lang: "or",
            tts: Some("ବ\u{b3e}ମ-ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର"),
            keywords: &["ତୀର", "ବ\u{b3e}ମ-ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର"],
        },
        #[cfg(feature = "pa")]
        crate::Annotation {
            lang: "pa",
            tts: Some("ਖ\u{a71}\u{a71}ਬ\u{a47}-ਸ\u{a71}ਜ\u{a47} ਤੀਰ"),
            keywords: &["ਖ\u{a71}\u{a71}ਬ\u{a47}-ਸ\u{a71}ਜ\u{a47} ਤੀਰ", "ਤੀਰ"],
        },
        #[cfg(feature = "pa_Arab")]
        crate::Annotation {
            lang: "pa_Arab",
            tts: Some("کھبا-سجا تیر"),
            keywords: &["تیر", "کھبا-سجا تیر"],
        },
        #[cfg(feature = "pcm")]
        crate::Annotation {
            lang: "pcm",
            tts: Some("Áro De Pọínt Lẹ\u{301}ft An Rait"),
            keywords: &["Áro", "Áro De Pọínt Lẹ\u{301}ft An Rait"],
        },
        #[cfg(feature = "pl")]
        crate::Annotation {
            lang: "pl",
            tts: Some("strzałka w lewo i w prawo"),
            keywords: &[
                "dwukierunkowa",
                "lewo-prawo",
                "na boki",
                "strzałka w lewo i w prawo",
            ],
        },
        #[cfg(feature = "ps")]
        crate::Annotation {
            lang: "ps",
            tts: Some("ښۍ چپ غشی"),
            keywords: &["غشی", "ښۍ چپ غشی"],
        },
        #[cfg(feature = "pt")]
        crate::Annotation {
            lang: "pt",
            tts: Some("seta para esquerda e direita"),
            keywords: &[
                "horizontal",
                "lados",
                "seta",
                "seta para esquerda e direita",
            ],
        },
        #[cfg(feature = "pt_PT")]
        crate::Annotation {
            lang: "pt_PT",
            tts: Some("seta para a esquerda e para a direita"),
            keywords: &[
                "direita",
                "esquerda",
                "seta",
                "seta para a esquerda e para a direita",
            ],
        },
        #[cfg(feature = "qu")]
        crate::Annotation {
            lang: "qu",
            tts: Some("lluq’i paña wach’i"),
            keywords: &["lluq’i paña wach’i"],
        },
        #[cfg(feature = "ro")]
        crate::Annotation {
            lang: "ro",
            tts: Some("săgeată dreapta-stânga"),
            keywords: &["săgeată", "săgeată dreapta-stânga"],
        },
        // "root" carries the locale-neutral CLDR identifier rather than a translation.
        #[cfg(feature = "root")]
        crate::Annotation {
            lang: "root",
            tts: Some("E10-006"),
            keywords: &["E10-006"],
        },
        #[cfg(feature = "ru")]
        crate::Annotation {
            lang: "ru",
            tts: Some("стрелка влево-вправо"),
            keywords: &[
                "влево и вправо",
                "влево-вправо",
                "двойная",
                "стрелка",
                "стрелка влево вправо",
            ],
        },
        #[cfg(feature = "rw")]
        crate::Annotation {
            lang: "rw",
            tts: Some("akambi kava ibumoso kajya iburyo"),
            keywords: &["akambi kava ibumoso kajya iburyo", "umwambi"],
        },
        #[cfg(feature = "sd")]
        crate::Annotation {
            lang: "sd",
            tts: Some("ساڄي کاٻي پاسي تير"),
            keywords: &["تير", "ساڄي کاٻي پاسي تير"],
        },
        #[cfg(feature = "si")]
        crate::Annotation {
            lang: "si",
            tts: Some("වම\u{dca}-දක\u{dd4}ණ\u{dd4} ඊතලය"),
            keywords: &["ඊතලය", "වම\u{dca}-දක\u{dd4}ණ\u{dd4} ඊතලය"],
        },
        #[cfg(feature = "sk")]
        crate::Annotation {
            lang: "sk",
            tts: Some("šípka doprava aj doľava"),
            keywords: &["šípka", "šípka doprava aj doľava"],
        },
        #[cfg(feature = "sl")]
        crate::Annotation {
            lang: "sl",
            tts: Some("puščica levo desno"),
            keywords: &["puščica", "puščica levo desno"],
        },
        #[cfg(feature = "so")]
        crate::Annotation {
            lang: "so",
            tts: Some("fallaarta bidix-midig"),
            keywords: &["fallaar", "fallaarta bidix-midig"],
        },
        #[cfg(feature = "sq")]
        crate::Annotation {
            lang: "sq",
            tts: Some("shigjetë majtas-djathtas"),
            keywords: &["shigjetë", "shigjetë majtas-djathtas"],
        },
        #[cfg(feature = "sr")]
        crate::Annotation {
            lang: "sr",
            tts: Some("стрелица налево и надесно"),
            keywords: &["стрeлицa", "стрелица налево и надесно"],
        },
        #[cfg(feature = "sr_Cyrl_BA")]
        crate::Annotation {
            lang: "sr_Cyrl_BA",
            tts: Some("стрелица налијево и надесно"),
            keywords: &["стрелица налијево и надесно"],
        },
        #[cfg(feature = "sr_Latn")]
        crate::Annotation {
            lang: "sr_Latn",
            tts: Some("strelica nalevo i nadesno"),
            keywords: &["strelica", "strelica nalevo i nadesno"],
        },
        #[cfg(feature = "sr_Latn_BA")]
        crate::Annotation {
            lang: "sr_Latn_BA",
            tts: Some("strelica nalijevo i nadesno"),
            keywords: &["strelica nalijevo i nadesno"],
        },
        #[cfg(feature = "sv")]
        crate::Annotation {
            lang: "sv",
            tts: Some("pil höger och vänster"),
            keywords: &["höger", "pil", "pil höger och vänster", "vänster"],
        },
        #[cfg(feature = "sw")]
        crate::Annotation {
            lang: "sw",
            tts: Some("mshale unaoeleza kushoto na kulia"),
            keywords: &["mshale", "mshale unaoeleza kushoto na kulia"],
        },
        #[cfg(feature = "sw_KE")]
        crate::Annotation {
            lang: "sw_KE",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "ta")]
        crate::Annotation {
            lang: "ta",
            tts: Some("இடது-வலது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி"),
            keywords: &[
                "அம\u{bcd}புக\u{bcd}குறி",
                "இடது-வலது நோக\u{bcd}கிய அம\u{bcd}புக\u{bcd}குறி",
            ],
        },
        #[cfg(feature = "te")]
        crate::Annotation {
            lang: "te",
            tts: Some("ఎడమ మర\u{c3f}యు కుడ\u{c3f} బ\u{c3e}ణం"),
            keywords: &[
                "ఎడమ",
                "ఎడమ మర\u{c3f}యు కుడ\u{c3f} బ\u{c3e}ణం",
                "కుడ\u{c3f}",
                "బ\u{c3e}ణం",
            ],
        },
        #[cfg(feature = "tg")]
        crate::Annotation {
            lang: "tg",
            tts: Some("тири чапу рост"),
            keywords: &["тир", "тири чапу рост"],
        },
        #[cfg(feature = "th")]
        crate::Annotation {
            lang: "th",
            tts: Some("ล\u{e39}กศรช\u{e35}\u{e49}ซ\u{e49}ายขวา"),
            keywords: &[
                "ขวา",
                "ซ\u{e49}าย",
                "ล\u{e39}กศร",
                "ล\u{e39}กศรช\u{e35}\u{e49}ซ\u{e49}ายขวา",
            ],
        },
        #[cfg(feature = "ti")]
        crate::Annotation {
            lang: "ti",
            tts: Some("ምልክት ናብ-የማን ጸጋም"),
            keywords: &["ምልክት", "ምልክት ናብ-የማን ጸጋም"],
        },
        #[cfg(feature = "tk")]
        crate::Annotation {
            lang: "tk",
            tts: Some("çepe-saga ok"),
            keywords: &["ok", "çepe-saga ok"],
        },
        #[cfg(feature = "to")]
        crate::Annotation {
            lang: "to",
            tts: Some("ngahau ki toʻohema mo toʻomataʻu"),
            keywords: &[
                "hema",
                "mataʻu",
                "ngahau",
                "ngahau ki toʻohema mo toʻomataʻu",
            ],
        },
        #[cfg(feature = "tr")]
        crate::Annotation {
            lang: "tr",
            tts: Some("sol ve sağ ok"),
            keywords: &["ok", "sol ve sağ ok"],
        },
        #[cfg(feature = "ug")]
        crate::Annotation {
            lang: "ug",
            tts: Some("ئوڭ-سول كۆرسەتكۈچ"),
            keywords: &["ئوڭ-سول كۆرسەتكۈچ", "كۆرسەتكۈچ"],
        },
        #[cfg(feature = "uk")]
        crate::Annotation {
            lang: "uk",
            tts: Some("стрілка вліво-вправо"),
            keywords: &[
                "стрілка",
                "стрілка вліво-вправо",
                "стрілка праворуч і ліворуч",
            ],
        },
        #[cfg(feature = "ur")]
        crate::Annotation {
            lang: "ur",
            tts: Some("دائیں بائیں تیر"),
            keywords: &["تیر", "دائیں بائیں تیر", "سائن", "سمت"],
        },
        #[cfg(feature = "uz")]
        crate::Annotation {
            lang: "uz",
            tts: Some("chapga-o‘ngga strelka"),
            keywords: &["chapga-o‘ngga strelka", "strelka"],
        },
        #[cfg(feature = "vi")]
        crate::Annotation {
            lang: "vi",
            tts: Some("mũi tên trái phải"),
            keywords: &["mũi tên", "mũi tên trái phải"],
        },
        #[cfg(feature = "wo")]
        crate::Annotation {
            lang: "wo",
            tts: Some("fettu càmmoñu-ndijoor"),
            keywords: &["fett", "fettu càmmoñu-ndijoor"],
        },
        #[cfg(feature = "xh")]
        crate::Annotation {
            lang: "xh",
            tts: Some("utolo olusekunene ekhohlo"),
            keywords: &["utolo", "utolo olusekunene ekhohlo"],
        },
        #[cfg(feature = "yo")]
        crate::Annotation {
            lang: "yo",
            tts: Some("àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà tọ\u{300}tún-tòsì"),
            keywords: &[
                "àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà tọ\u{300}tún-tòsì",
                "ọfà",
            ],
        },
        #[cfg(feature = "yue")]
        crate::Annotation {
            lang: "yue",
            tts: Some("左右箭咀"),
            keywords: &["左右箭咀", "箭咀"],
        },
        #[cfg(feature = "yue_Hans")]
        crate::Annotation {
            lang: "yue_Hans",
            tts: Some("左右箭咀"),
            keywords: &["左右箭咀", "箭咀"],
        },
        #[cfg(feature = "zh")]
        crate::Annotation {
            lang: "zh",
            tts: Some("左右箭头"),
            keywords: &["左右", "左右箭头", "箭头"],
        },
        #[cfg(feature = "zh_Hant")]
        crate::Annotation {
            lang: "zh_Hant",
            tts: Some("左右箭頭"),
            keywords: &["左右箭頭"],
        },
        #[cfg(feature = "zh_Hant_HK")]
        crate::Annotation {
            lang: "zh_Hant_HK",
            tts: Some("左右箭嘴"),
            keywords: &["左右箭嘴"],
        },
        #[cfg(feature = "zu")]
        crate::Annotation {
            lang: "zu",
            tts: Some("umcibisholo okwesokunxele nesokudla"),
            keywords: &["umcibisholo", "umcibisholo okwesokunxele nesokudla"],
        },
    ],
};
#[doc = "↩\u{fe0f}"]
pub const RIGHT_ARROW_CURVING_LEFT: crate::Emoji = crate::Emoji {
glyph: "↩\u{fe0f}",
codepoint: "21A9 FE0F",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "right arrow curving left",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[crate::Emoji {
glyph: "↩",
codepoint: "21A9",
status: crate::Status::Unqualified,
introduction_version: 0.6f32,
name: "right arrow curving left",
group: "Symbols",
subgroup: "arrow",
is_variant: true,
variants: &[],
annotations: &[],
}],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("pyl na regs buig na links"),
keywords: &["pyl", "pyl na regs buig na links"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("ወደ ግራ ታጣፊ ቀኝ ጠቋሚ ቀስት"),
keywords: &["ቀስት", "ወደ ግራ ታጣፊ ቀኝ ጠቋሚ ቀስት"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهم أيمن بانحناء أيسر"),
keywords: &["سهم", "سهم أيمن بانحناء أيسر"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some(
"ব\u{9be}ও\u{981}ফ\u{9be}ললৈ ভ\u{9be}\u{981}জ লোৱ\u{9be} সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
),
keywords: &[
"ক\u{9be}\u{981}ড\u{9bc}",
"ব\u{9be}ও\u{981}ফ\u{9be}ললৈ ভ\u{9be}\u{981}জ লোৱ\u{9be} সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("sola dönən sağ ox"),
keywords: &["ox", "sola dönən sağ ox"],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка ўправа з паваротам улева"),
keywords: &[
"кірунак",
"рух",
"стрэлка",
"стрэлка ўправа з паваротам улева",
],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("извита стрелка наляво"),
keywords: &["извита стрелка наляво", "стрелка"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("ড\u{9be}ন তীর ব\u{9be}দিকে ব\u{9be}\u{981}ক\u{9be}নো"),
keywords: &[
"ড\u{9be}ন তীর ব\u{9be}দিকে ব\u{9be}\u{981}ক\u{9be}নো",
"তীর",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica desno zakrivljena nalijevo"),
keywords: &["strelica", "strelica desno zakrivljena nalijevo"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa dreta que gira cap a l’esquerra"),
keywords: &[
"dreta",
"esquerra",
"fletxa",
"fletxa dreta que gira cap a l’esquerra",
"gir",
],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎠᎦᏘᏏ ᎦᏝᏗ ᎠᏕᏲ ᎠᎦᏍᎦᏂ"),
keywords: &["ᎠᎦᏘᏏ ᎦᏝᏗ ᎠᏕᏲ ᎠᎦᏍᎦᏂ", "ᎦᏝᏗ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipka doprava stáčející se doleva"),
keywords: &["šipka", "šipka doprava stáčející se doleva"],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth i’r dde yn troi i’r chwith"),
keywords: &["saeth", "saeth i’r dde yn troi i’r chwith"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("pil mod højre med sving til venstre"),
keywords: &["pil", "pil mod højre med sving til venstre"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("geschwungener Pfeil nach links"),
keywords: &[
"Pfeil",
"geschwungen",
"geschwungener Pfeil nach links",
"links",
"nach links",
],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("δεξιό βέλος που στρίβει αριστερά"),
keywords: &["βέλος", "δεξιό βέλος που στρίβει αριστερά"],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("right arrow curving left"),
keywords: &["arrow", "right arrow curving left"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha derecha curvándose a la izquierda"),
keywords: &[
"curva",
"dirección",
"flecha",
"flecha derecha curvándose a la izquierda",
"izquierda",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("flecha de retorno al oeste por la derecha"),
keywords: &["flecha de retorno al oeste por la derecha"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &["flecha", "flecha derecha curvándose a la izquierda"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("kaardus nool vasakule"),
keywords: &["kaardus nool vasakule", "nool", "vasak"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("ezkerrera okertzen den eskuinera gezia"),
keywords: &["ezkerrera okertzen den eskuinera gezia", "gezi"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان خمیده راست"),
keywords: &["پیکان", "پیکان خمیده راست"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("vasemmalle kääntyvä nuoli"),
keywords: &["nuoli", "vasemmalle kääntyvä nuoli"],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("pakanang arrow na kumurba pakaliwa"),
keywords: &[
"arrow",
"direksyon",
"kurba",
"pakaliwa",
"pakanan",
"pakanang arrow na kumurba pakaliwa",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("bogin pílur sum peikar til vinstru"),
keywords: &["bogin", "bogin pílur sum peikar til vinstru", "pílur"],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche courbe gauche"),
keywords: &["flèche", "flèche courbe gauche"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("flèche courbe vers la gauche"),
keywords: &[
"courbe vers la gauche",
"flèche",
"flèche courbe vers la gauche",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead dheas ag dul ar chlé"),
keywords: &["saighead", "saighead dheas ag dul ar chlé"],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead dheas a’ lùbadh dhan taobh chlì"),
keywords: &["saighead", "saighead dheas a’ lùbadh dhan taobh chlì"],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha curvada cara á esquerda"),
keywords: &[
"curva",
"esquerda",
"frecha",
"frecha curvada cara á esquerda",
],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("ડાબ\u{ac7} વળત\u{ac1}\u{a82} જમણ\u{ac1}\u{a82} તીર"),
keywords: &["ડાબ\u{ac7} વળત\u{ac1}\u{a82} જમણ\u{ac1}\u{a82} તીર", "તીર"],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("kibiyar dama mai lanƙwasa ta hagu"),
keywords: &["kibiya", "kibiyar dama mai lanƙwasa ta hagu"],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ ימינה מתעקל שמאלה"),
keywords: &["חץ", "חץ ימינה מתעקל שמאלה", "מתעקל", "פרסה", "שמאלה"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("बाई\u{902} ओर म\u{941}ड\u{93c}ा दाया\u{901} तीर"),
keywords: &["तीर", "बाई\u{902} ओर म\u{941}ड\u{93c}ा दाया\u{901} तीर"],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("desna strelica koja se zakrivljuje ulijevo"),
keywords: &["desna strelica koja se zakrivljuje ulijevo", "strelica"],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("balra görbülő jobb nyíl"),
keywords: &["balra görbülő jobb nyíl", "nyíl"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("աջ շրջադարձի սլաք"),
keywords: &["աջ շրջադարձի սլաք", "սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah kanan melengkung ke kiri"),
keywords: &["panah", "tanda panah kanan melengkung ke kiri"],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("ube-akanri gbagọrọ akaekpe"),
keywords: &["ube", "ube-akanri gbagọrọ akaekpe"],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("sveigð ör til vinstri"),
keywords: &["sveigð ör til vinstri", "ör"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia curva a sinistra"),
keywords: &[
"curva",
"freccia",
"freccia curva a sinistra",
"verso sinistra",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("右カーブ矢印"),
keywords: &["カーブ", "右カーブ矢印", "左", "曲線", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah nengen menggok ngiwa"),
keywords: &["panah", "panah nengen menggok ngiwa"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი, რომელიც იმრუდება მარჯვნიდან მარცხნივ"),
keywords: &["ისარი", "ისარი, რომელიც იმრუდება მარჯვნიდან მარცხნივ"],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("aneccab ayeffus yettin s azelmaḍ"),
keywords: &["aneccab ayeffus yettin s azelmaḍ"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("оң жақтан шығып, сол жаққа бұрылатын көрсеткі"),
keywords: &["көрсеткі", "оң жақтан шығып, сол жаққа бұрылатын көрсеткі"],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("pil mod højre med sving til venstre"),
keywords: &["pil", "pil mod højre med sving til venstre"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some(
"ព\u{17d2}រ\u{17bd}ញកោងត\u{17d2}រឡប\u{17cb}ទៅឆ\u{17d2}វេងក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
),
keywords: &[
"ទ\u{17b7}ស",
"ទ\u{17b7}សដៅ",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញកោងត\u{17d2}រឡប\u{17cb}ទៅឆ\u{17d2}វេងក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಎಡಕ\u{ccd}ಕ\u{cc6} ತ\u{cbf}ರುಗುವ ಬಲ ಬಾಣ"),
keywords: &["ಎಡಕ\u{ccd}ಕ\u{cc6} ತ\u{cbf}ರುಗುವ ಬಲ ಬಾಣ", "ಬಾಣ"],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("왼쪽으로 꺾어지는 우향 화살표"),
keywords: &["왼쪽으로 꺾어지는 우향 화살표", "좌회전", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("उजवो बाण दाव\u{94d}यान वळा"),
keywords: &["उजवो बाण दाव\u{94d}यान वळा", "बाण"],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("оңдон солго ийилген жебе"),
keywords: &["жебе", "оңдон солго ийилген жебе"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("Rietsfeil mat Lénkskéier"),
keywords: &["Feiler", "Rietsfeil mat Lénkskéier"],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນໂຄ\u{ec9}ງຂວາໄປຊ\u{ec9}າຍ"),
keywords: &[
"ຊ\u{ec9}າຍ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນໂຄ\u{ec9}ງຂວາໄປຊ\u{ec9}າຍ",
"ໂຄ\u{ec9}ງ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė į dešinę, užlenkta į kairę"),
keywords: &["rodyklė", "rodyklė į dešinę, užlenkta į kairę"],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("pa kreisi vērsta izliekta bultiņa"),
keywords: &["bultiņa", "pa kreisi vērsta izliekta bultiņa"],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere matau piko mauī"),
keywords: &["pere", "pere matau piko mauī"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("стрелка надесно што врти лево"),
keywords: &["десно", "лево", "стрелка", "стрелка надесно што врти лево"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some("ഇടത\u{d4d}തേക\u{d4d}ക\u{d4d} വളഞ\u{d4d}ഞ വലത\u{d4d} അമ\u{d4d}പടയ\u{d3e}ളം"),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ഇടത\u{d4d}തേക\u{d4d}ക\u{d4d} വളഞ\u{d4d}ഞ വലത\u{d4d} അമ\u{d4d}പടയ\u{d3e}ളം",
"വലത\u{d4d}ത\u{d4d}",
"വളവ\u{d4d}",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("зүүн эргэсэн баруун сум"),
keywords: &["баруун", "зүүн", "зүүн эргэсэн баруун сум", "сум", "эргэх"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("उजवा डावीकड\u{947} वळल\u{947}ला बाण"),
keywords: &[
"उजवा डावीकड\u{947} वळल\u{947}ला बाण",
"उजवा-डावीकड\u{947} वळल\u{947}ला बाण",
"बाण",
],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah ke kanan melengkung ke kiri"),
keywords: &["anak panah", "anak panah ke kanan melengkung ke kiri"],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa leminija mgħawġa lejn ix-xellug"),
keywords: &["vleġġa", "vleġġa leminija mgħawġa lejn ix-xellug"],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ညာည\u{103d}\u{103e}န\u{103a} ဘယ\u{103a}ဝ\u{102d}\u{102f}က\u{103a} မြား"),
keywords: &[
"ညာည\u{103d}\u{103e}န\u{103a} ဘယ\u{103a}ဝ\u{102d}\u{102f}က\u{103a} မြား",
"ဘယ\u{103a}က\u{103d}ေ\u{1037} ညာဖက\u{103a} မြား",
"မြား",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("pil som bøyer mot venstre"),
keywords: &["pil", "pil som bøyer mot venstre"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("बाया\u{901} मोड\u{947}को दाया\u{901} वाण"),
keywords: &["बाया\u{901} मोड\u{947}को दाया\u{901} वाण", "वाण"],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("pijl naar rechts die naar links draait"),
keywords: &["pijl", "pijl naar rechts die naar links draait"],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("pil som bøyer mot venstre"),
keywords: &["pil", "pil som bøyer mot venstre"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର ବ\u{b3e}ମକ\u{b41} ମୋଡ\u{b3c}\u{b3f} ହେଉଛ\u{b3f}"),
keywords: &[
"ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର ବ\u{b3e}ମକ\u{b41} ମୋଡ\u{b3c}\u{b3f} ହେଉଛ\u{b3f}",
"ତୀର",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਸ\u{a71}\u{a71}ਜ\u{a47} ਮ\u{a41}ੜਨ ਵਾਲਾ ਖ\u{a71}ਬਾ ਤੀਰ"),
keywords: &[
"ਤੀਰ",
"ਸ\u{a71}\u{a71}ਜ\u{a47} ਮ\u{a41}ੜਨ ਵਾਲਾ ਖ\u{a71}ਬਾ ਤੀਰ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("سجا تیر کھبے مڑدیاں ہوئیاں"),
keywords: &["تیر", "سجا تیر کھبے مڑدیاں ہوئیاں"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Raít Áro Kọ\u{301}v Gó Lẹft"),
keywords: &["Raít Áro Kọ\u{301}v Gó Lẹft", "Áro"],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka zakręcona w lewo"),
keywords: &["strzałka", "strzałka zakręcona w lewo", "zakręt", "zawróć"],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("ښۍ غشی چپ اړخ ته کوږ"),
keywords: &["غشی", "ښۍ غشی چپ اړخ ته کوږ"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta curva da direita para a esquerda"),
keywords: &[
"curva",
"retorno",
"seta",
"seta curva da direita para a esquerda",
"voltar à esquerda",
],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("seta para a direita com curva para a esquerda"),
keywords: &[
"curva",
"esquerda",
"seta",
"seta para a direita com curva para a esquerda",
],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("paña wach’i llink’uwan lluq’iman"),
keywords: &["paña wach’i llink’uwan lluq’iman"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată dreapta curbată spre stânga"),
keywords: &["săgeată", "săgeată dreapta curbată spre stânga"],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-012"),
keywords: &["E10-012"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка вправо с поворотом влево"),
keywords: &[
"изгиб",
"изогнутая стрелка",
"поворот влево",
"стрелка вправо",
"стрелка вправо с поворотом влево",
],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("umwambi w’iburyo uhetamiye ibumoso"),
keywords: &["umwambi", "umwambi w’iburyo uhetamiye ibumoso"],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("ساڄي تير مڙندي کاٻي طرف"),
keywords: &["تير", "ساڄي تير مڙندي کاٻي طرف"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("වමට රව\u{dd4}ම\u{dca} ව\u{dd6} දක\u{dd4}ණ\u{dd4} ඊතලය"),
keywords: &[
"ඊතලය",
"වමට රව\u{dd4}ම\u{dca} ව\u{dd6} දක\u{dd4}ණ\u{dd4} ඊතලය",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka doprava zakrivená doľava"),
keywords: &["šípka", "šípka doprava zakrivená doľava"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica desno, ki se ukrivlja v levo"),
keywords: &["puščica", "puščica desno, ki se ukrivlja v levo"],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("fallaarta midig u qalloocsan bidix"),
keywords: &["fallaar", "fallaarta midig u qalloocsan bidix"],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjetë djathtas e përkulur majtas"),
keywords: &["shigjetë", "shigjetë djathtas e përkulur majtas"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица за полукружни окрет удесно"),
keywords: &["стрeлицa", "стрелица за полукружни окрет удесно"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica za polukružni okret udesno"),
keywords: &["strelica", "strelica za polukružni okret udesno"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("svängd vänsterpil"),
keywords: &["pil", "sväng", "svängd vänsterpil", "vänster"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale wa kulia unaopinda kushoto"),
keywords: &["mshale", "mshale wa kulia unaopinda kushoto"],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some(
"இடப\u{bcd}பக\u{bcd}கம\u{bcd} வளைந\u{bcd}திருக\u{bcd}கும\u{bcd} வலது அம\u{bcd}புக\u{bcd}குறி",
),
keywords: &[
"அம\u{bcd}புக\u{bcd}குறி",
"இடப\u{bcd}பக\u{bcd}கம\u{bcd} வளைந\u{bcd}த வலது அம\u{bcd}புக\u{bcd}குறி",
"இடப\u{bcd}பக\u{bcd}கம\u{bcd} வளைந\u{bcd}திருக\u{bcd}கும\u{bcd} வலது அம\u{bcd}புக\u{bcd}குறி",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("ఎడమవ\u{c48}పు వంపు త\u{c3f}ర\u{c3f}గ\u{c3f}న కుడ\u{c3f} బ\u{c3e}ణం"),
keywords: &[
"ఎడమవ\u{c48}పు",
"ఎడమవ\u{c48}పు వంపు త\u{c3f}ర\u{c3f}గ\u{c3f}న కుడ\u{c3f} బ\u{c3e}ణం",
"కుడ\u{c3f}",
"బ\u{c3e}ణం",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири рости ба тарафи чап каҷшаванда"),
keywords: &["тир", "тири рости ба тарафи чап каҷшаванда"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ล\u{e39}กศรวนซ\u{e49}าย"),
keywords: &["ซ\u{e49}าย", "ล\u{e39}กศร", "ล\u{e39}กศรวนซ\u{e49}าย", "วน"],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("የማናይ ምልክት ናብ ጸጋም ዝተጠውየ"),
keywords: &["ምልክት", "የማናይ ምልክት ናብ ጸጋም ዝተጠውየ"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("saga ok, çepe egrelýän"),
keywords: &["ok", "saga ok, çepe egrelýän"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau afe ki toʻohema"),
keywords: &["afe", "hema", "ngahau", "ngahau afe ki toʻohema"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("sola kıvrımlı sağ ok"),
keywords: &["ok", "sola kıvrımlı sağ ok"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("سولغا ئېگىلگەن ئوڭ كۆرسەتكۈچ"),
keywords: &["سولغا ئېگىلگەن ئوڭ كۆرسەتكۈچ", "كۆرسەتكۈچ"],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка вправо з вигином уліво"),
keywords: &[
"стрілка",
"стрілка вправо з вигином уліво",
"стрілка праворуч із поворотом ліворуч",
],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("بائیں مڑتا ہوا دائیں تیر"),
keywords: &["بائیں مڑتا ہوا دائیں تیر", "تیر", "سائن", "سمت"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("o‘ngdan chapga qayrilish"),
keywords: &["o‘ngdan chapga qayrilish", "strelka"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên phải cong sang trái"),
keywords: &["mũi tên", "mũi tên phải cong sang trái"],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fettu ndijoor lemoo càmmoñ"),
keywords: &["fett", "fettu ndijoor lemoo càmmoñ"],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo olusekunene olugobe ekhohlo"),
keywords: &["utolo", "utolo olusekunene olugobe ekhohlo"],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún onígun"),
keywords: &[
"àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún onígun",
"ọfà",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("向右箭咀彎向左"),
keywords: &["向右箭咀彎向左", "箭咀"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("向右箭咀弯向左"),
keywords: &["向右箭咀弯向左", "箭咀"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("右转弯箭头"),
keywords: &["右转弯", "右转弯箭头", "向左弯曲的右箭头", "箭头"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("向左彎的右箭頭"),
keywords: &["向左彎的右箭頭"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("向左彎嘅右箭嘴"),
keywords: &["向左彎嘅右箭嘴"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("umcibisholo okhombe kwesokudla ogobe kwesokunxele"),
keywords: &[
"umcibisholo",
"umcibisholo okhombe kwesokudla ogobe kwesokunxele",
],
},
],
};
/// Emoji metadata for "left arrow curving right" (`↪️`, codepoints
/// `21AA FE0F`): the fully-qualified glyph, one unqualified variant (the same
/// arrow without the `U+FE0F` variation selector), and per-locale name/keyword
/// annotations, each compiled in only when the matching language feature flag
/// is enabled.
///
/// NOTE(review): `tts`/`keywords` values of `"↑↑↑"` (e.g. `en_AU`, `sw_KE`)
/// appear to be inherited-locale placeholders from the upstream annotation
/// data — confirm against the data generator before treating them as real
/// display strings.
#[doc = "↪\u{fe0f}"]
pub const LEFT_ARROW_CURVING_RIGHT: crate::Emoji = crate::Emoji {
    glyph: "↪\u{fe0f}",
    codepoint: "21AA FE0F",
    status: crate::Status::FullyQualified,
    introduction_version: 0.6f32,
    name: "left arrow curving right",
    group: "Symbols",
    subgroup: "arrow",
    is_variant: false,
    // Unqualified variant: identical arrow without the U+FE0F variation
    // selector (codepoint "21AA" vs "21AA FE0F" on the parent).
    variants: &[crate::Emoji {
        glyph: "↪",
        codepoint: "21AA",
        status: crate::Status::Unqualified,
        introduction_version: 0.6f32,
        name: "left arrow curving right",
        group: "Symbols",
        subgroup: "arrow",
        is_variant: true,
        variants: &[],
        annotations: &[],
    }],
    // One localized annotation per enabled language feature; `lang` matches
    // the feature name.
    annotations: &[
        #[cfg(feature = "af")]
        crate::Annotation {
            lang: "af",
            tts: Some("pyl na links buig na regs"),
            keywords: &["pyl", "pyl na links buig na regs"],
        },
        #[cfg(feature = "am")]
        crate::Annotation {
            lang: "am",
            tts: Some("ወደ ቀኝ ታጣፊ ግራ ጠቋሚ ቀስት"),
            keywords: &["ቀስት", "ወደ ቀኝ ታጣፊ ግራ ጠቋሚ ቀስት"],
        },
        #[cfg(feature = "ar")]
        crate::Annotation {
            lang: "ar",
            tts: Some("سهم أيسر بانحناء أيمن"),
            keywords: &["سهم", "سهم أيسر بانحناء أيمن"],
        },
        #[cfg(feature = "as")]
        crate::Annotation {
            lang: "as",
            tts: Some(
                "সো\u{981}ফ\u{9be}ললৈ ভ\u{9be}\u{981}জ লোৱ\u{9be} ব\u{9be}ও\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
            ),
            keywords: &[
                "ক\u{9be}\u{981}ড\u{9bc}",
                "সো\u{981}ফ\u{9be}ললৈ ভ\u{9be}\u{981}জ লোৱ\u{9be} ব\u{9be}ও\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
            ],
        },
        #[cfg(feature = "az")]
        crate::Annotation {
            lang: "az",
            tts: Some("sağa dönən sol ox"),
            keywords: &["ox", "sağa dönən sol ox"],
        },
        #[cfg(feature = "be")]
        crate::Annotation {
            lang: "be",
            tts: Some("стрэлка ўлева з паваротам управа"),
            keywords: &[
                "кірунак",
                "рух",
                "стрэлка",
                "стрэлка ўлева з паваротам управа",
            ],
        },
        #[cfg(feature = "bg")]
        crate::Annotation {
            lang: "bg",
            tts: Some("извита стрелка надясно"),
            keywords: &["извита стрелка надясно", "стрелка"],
        },
        #[cfg(feature = "bn")]
        crate::Annotation {
            lang: "bn",
            tts: Some("ব\u{9be}ম তীর ড\u{9be}ন দিকে ব\u{9be}\u{981}ক\u{9be}নো"),
            keywords: &[
                "তীর",
                "ব\u{9be}ম তীর ড\u{9be}ন দিকে ব\u{9be}\u{981}ক\u{9be}নো",
            ],
        },
        #[cfg(feature = "bs")]
        crate::Annotation {
            lang: "bs",
            tts: Some("strelica lijevo zakrivljena nadesno"),
            keywords: &["strelica", "strelica lijevo zakrivljena nadesno"],
        },
        #[cfg(feature = "ca")]
        crate::Annotation {
            lang: "ca",
            tts: Some("fletxa esquerra que gira cap a la dreta"),
            keywords: &[
                "dreta",
                "esquerra",
                "fletxa",
                "fletxa esquerra que gira cap a la dreta",
                "gir",
            ],
        },
        #[cfg(feature = "chr")]
        crate::Annotation {
            lang: "chr",
            tts: Some("ᎠᎦᏍᎦᏂ ᎦᏝᏗ ᎠᏕᏲ ᎠᎦᏘᏏ"),
            keywords: &["ᎠᎦᏍᎦᏂ ᎦᏝᏗ ᎠᏕᏲ ᎠᎦᏘᏏ", "ᎦᏝᏗ"],
        },
        #[cfg(feature = "cs")]
        crate::Annotation {
            lang: "cs",
            tts: Some("šipka doleva stáčející se doprava"),
            keywords: &["šipka", "šipka doleva stáčející se doprava"],
        },
        #[cfg(feature = "cy")]
        crate::Annotation {
            lang: "cy",
            tts: Some("saeth i’r chwith yn troi i’r dde"),
            keywords: &["saeth", "saeth i’r chwith yn troi i’r dde"],
        },
        #[cfg(feature = "da")]
        crate::Annotation {
            lang: "da",
            tts: Some("pil mod venstre med sving til højre"),
            keywords: &["pil", "pil mod venstre med sving til højre"],
        },
        #[cfg(feature = "de")]
        crate::Annotation {
            lang: "de",
            tts: Some("geschwungener Pfeil nach rechts"),
            keywords: &[
                "Pfeil",
                "geschwungen",
                "geschwungener Pfeil nach rechts",
                "nach rechts",
                "rechts",
            ],
        },
        #[cfg(feature = "el")]
        crate::Annotation {
            lang: "el",
            tts: Some("αριστερό βέλος που στρίβει δεξιά"),
            keywords: &["αριστερό βέλος που στρίβει δεξιά", "βέλος"],
        },
        #[cfg(feature = "en")]
        crate::Annotation {
            lang: "en",
            tts: Some("left arrow curving right"),
            keywords: &["arrow", "left arrow curving right"],
        },
        #[cfg(feature = "en_AU")]
        crate::Annotation {
            lang: "en_AU",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_CA")]
        crate::Annotation {
            lang: "en_CA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_GB")]
        crate::Annotation {
            lang: "en_GB",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_IN")]
        crate::Annotation {
            lang: "en_IN",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es")]
        crate::Annotation {
            lang: "es",
            tts: Some("flecha izquierda curvándose a la derecha"),
            keywords: &[
                "curva",
                "derecha",
                "dirección",
                "flecha",
                "flecha izquierda curvándose a la derecha",
            ],
        },
        #[cfg(feature = "es_419")]
        crate::Annotation {
            lang: "es_419",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_MX")]
        crate::Annotation {
            lang: "es_MX",
            tts: Some("flecha de retorno al este por la izquierda"),
            keywords: &["flecha de retorno al este por la izquierda"],
        },
        #[cfg(feature = "es_US")]
        crate::Annotation {
            lang: "es_US",
            tts: Some("↑↑↑"),
            keywords: &[
                "flecha",
                "flecha izquierda curvándose a la derecha",
                "flecha izquierda curvándose a la izquierda",
            ],
        },
        #[cfg(feature = "et")]
        crate::Annotation {
            lang: "et",
            tts: Some("kaardus nool paremale"),
            keywords: &["kaardus nool paremale", "nool", "parem"],
        },
        #[cfg(feature = "eu")]
        crate::Annotation {
            lang: "eu",
            tts: Some("eskuinera okertzen den ezkerrera gezia"),
            keywords: &["eskuinera okertzen den ezkerrera gezia", "gezi"],
        },
        #[cfg(feature = "fa")]
        crate::Annotation {
            lang: "fa",
            tts: Some("پیکان خمیده چپ"),
            keywords: &["پیکان", "پیکان خمیده چپ"],
        },
        #[cfg(feature = "fi")]
        crate::Annotation {
            lang: "fi",
            tts: Some("oikealle kääntyvä nuoli"),
            keywords: &["nuoli", "oikealle kääntyvä nuoli"],
        },
        #[cfg(feature = "fil")]
        crate::Annotation {
            lang: "fil",
            tts: Some("pakaliwang arrow na kumurba pakanan"),
            keywords: &[
                "arrow",
                "direksyon",
                "kurba",
                "pakaliwa",
                "pakaliwang arrow na kumurba pakanan",
            ],
        },
        #[cfg(feature = "fo")]
        crate::Annotation {
            lang: "fo",
            tts: Some("bogin pílur sum peikar til høgru"),
            keywords: &["bogin", "bogin pílur sum peikar til høgru", "pílur"],
        },
        #[cfg(feature = "fr")]
        crate::Annotation {
            lang: "fr",
            tts: Some("flèche courbe droite"),
            keywords: &["flèche", "flèche courbe droite"],
        },
        #[cfg(feature = "fr_CA")]
        crate::Annotation {
            lang: "fr_CA",
            tts: Some("flèche courbe vers la droite"),
            keywords: &[
                "courbe vers la droite",
                "flèche",
                "flèche courbe vers la droite",
            ],
        },
        #[cfg(feature = "ga")]
        crate::Annotation {
            lang: "ga",
            tts: Some("saighead chlé ag dul ar dheis"),
            keywords: &["saighead", "saighead chlé ag dul ar dheis"],
        },
        #[cfg(feature = "gd")]
        crate::Annotation {
            lang: "gd",
            tts: Some("saighead chlì a’ lùbadh dhan taobh deas"),
            keywords: &["saighead", "saighead chlì a’ lùbadh dhan taobh deas"],
        },
        #[cfg(feature = "gl")]
        crate::Annotation {
            lang: "gl",
            tts: Some("frecha curvada cara á dereita"),
            keywords: &[
                "curva",
                "dereita",
                "frecha",
                "frecha curvada cara á dereita",
            ],
        },
        #[cfg(feature = "gu")]
        crate::Annotation {
            lang: "gu",
            tts: Some("જમણ\u{ac7} વળત\u{ac1}\u{a82} ડાબ\u{ac1}\u{a82} તીર"),
            keywords: &["જમણ\u{ac7} વળત\u{ac1}\u{a82} ડાબ\u{ac1}\u{a82} તીર", "તીર"],
        },
        #[cfg(feature = "ha")]
        crate::Annotation {
            lang: "ha",
            tts: Some("kibiyar hagu mai lanƙwasa ta dama"),
            keywords: &["kibiya", "kibiyar hagu mai lanƙwasa ta dama"],
        },
        #[cfg(feature = "he")]
        crate::Annotation {
            lang: "he",
            tts: Some("חץ שמאלה מתעקל ימינה"),
            keywords: &["חץ", "חץ שמאלה מתעקל ימינה"],
        },
        #[cfg(feature = "hi")]
        crate::Annotation {
            lang: "hi",
            tts: Some("दाई\u{902} ओर म\u{941}ड\u{93c}ा बाया\u{901} तीर"),
            keywords: &["तीर", "दाई\u{902} ओर म\u{941}ड\u{93c}ा बाया\u{901} तीर"],
        },
        #[cfg(feature = "hr")]
        crate::Annotation {
            lang: "hr",
            tts: Some("lijeva strelica koja se zakrivljuje udesno"),
            keywords: &["lijeva strelica koja se zakrivljuje udesno", "strelica"],
        },
        #[cfg(feature = "hu")]
        crate::Annotation {
            lang: "hu",
            tts: Some("jobbra görbülő bal nyíl"),
            keywords: &["jobbra görbülő bal nyíl", "nyíl"],
        },
        #[cfg(feature = "hy")]
        crate::Annotation {
            lang: "hy",
            tts: Some("ձախ շրջադարձի սլաք"),
            keywords: &["ձախ շրջադարձի սլաք", "սլաք"],
        },
        #[cfg(feature = "id")]
        crate::Annotation {
            lang: "id",
            tts: Some("tanda panah kanan melengkung ke kanan"),
            keywords: &["panah", "tanda panah kanan melengkung ke kanan"],
        },
        #[cfg(feature = "ig")]
        crate::Annotation {
            lang: "ig",
            tts: Some("ube-akaekpe gbagọrọ akanri"),
            keywords: &["ube", "ube-akaekpe gbagọrọ akanri"],
        },
        #[cfg(feature = "is")]
        crate::Annotation {
            lang: "is",
            tts: Some("sveigð ör til hægri"),
            keywords: &["sveigð ör til hægri", "ör"],
        },
        #[cfg(feature = "it")]
        crate::Annotation {
            lang: "it",
            tts: Some("freccia curva a destra"),
            keywords: &["curva", "freccia", "freccia curva a destra", "verso destra"],
        },
        #[cfg(feature = "ja")]
        crate::Annotation {
            lang: "ja",
            tts: Some("左カーブ矢印"),
            keywords: &["カーブ", "右", "左カーブ矢印", "曲線", "矢印"],
        },
        #[cfg(feature = "jv")]
        crate::Annotation {
            lang: "jv",
            tts: Some("panah ngiwa menggok nengen"),
            keywords: &["panah", "panah ngiwa menggok nengen"],
        },
        #[cfg(feature = "ka")]
        crate::Annotation {
            lang: "ka",
            tts: Some("ისარი, რომელიც იმრუდება მარცნიდან მარჯვნივ"),
            keywords: &["ისარი", "ისარი, რომელიც იმრუდება მარცნიდან მარჯვნივ"],
        },
        #[cfg(feature = "kab")]
        crate::Annotation {
            lang: "kab",
            tts: Some("aneccab azelmaḍ yettin s ayeffus"),
            keywords: &["aneccab azelmaḍ yettin s ayeffus"],
        },
        #[cfg(feature = "kk")]
        crate::Annotation {
            lang: "kk",
            tts: Some("сол жақтан шығып, оң жаққа бұрылатын көрсеткі"),
            keywords: &["көрсеткі", "сол жақтан шығып, оң жаққа бұрылатын көрсеткі"],
        },
        #[cfg(feature = "kl")]
        crate::Annotation {
            lang: "kl",
            tts: Some("pil mod venstre med sving til højre"),
            keywords: &["pil", "pil mod venstre med sving til højre"],
        },
        #[cfg(feature = "km")]
        crate::Annotation {
            lang: "km",
            tts: Some(
                "ព\u{17d2}រ\u{17bd}ញកោងត\u{17d2}រឡប\u{17cb}ទៅស\u{17d2}តា\u{17c6}ក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ),
            keywords: &[
                "ទ\u{17b7}ស",
                "ទ\u{17b7}សដៅ",
                "ព\u{17d2}រ\u{17bd}ញ",
                "ព\u{17d2}រ\u{17bd}ញកោងត\u{17d2}រឡប\u{17cb}ទៅស\u{17d2}តា\u{17c6}ក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ],
        },
        #[cfg(feature = "kn")]
        crate::Annotation {
            lang: "kn",
            tts: Some("ಬಲಕ\u{ccd}ಕ\u{cc6} ತ\u{cbf}ರುಗುವ ಎಡ ಬಾಣ"),
            keywords: &["ಬಲಕ\u{ccd}ಕ\u{cc6} ತ\u{cbf}ರುಗುವ ಎಡ ಬಾಣ", "ಬಾಣ"],
        },
        #[cfg(feature = "ko")]
        crate::Annotation {
            lang: "ko",
            tts: Some("오른쪽으로 꺾어지는 좌향 화살표"),
            keywords: &["오른쪽으로 꺾어지는 좌향 화살표", "우회전", "화살표"],
        },
        #[cfg(feature = "kok")]
        crate::Annotation {
            lang: "kok",
            tts: Some("दावो बाण उजव\u{94d}यान वळा"),
            keywords: &["दावो बाण उजव\u{94d}यान वळा", "बाण"],
        },
        #[cfg(feature = "ky")]
        crate::Annotation {
            lang: "ky",
            tts: Some("солдон оңго ийилген жебе"),
            keywords: &["жебе", "солдон оңго ийилген жебе"],
        },
        #[cfg(feature = "lb")]
        crate::Annotation {
            lang: "lb",
            tts: Some("Lénksfeil mat Rietskéier"),
            keywords: &["Feiler", "Lénksfeil mat Rietskéier"],
        },
        #[cfg(feature = "lo")]
        crate::Annotation {
            lang: "lo",
            tts: Some("ລ\u{eb9}ກສອນໂຄ\u{ec9}ງຊ\u{ec9}າຍໄປຂວາ"),
            keywords: &[
                "ຂວາ",
                "ລ\u{eb9}ກສອນ",
                "ລ\u{eb9}ກສອນໂຄ\u{ec9}ງຊ\u{ec9}າຍໄປຂວາ",
                "ໂຄ\u{ec9}ງ",
            ],
        },
        #[cfg(feature = "lt")]
        crate::Annotation {
            lang: "lt",
            tts: Some("rodyklė į kairę, užlenkta į dešinę"),
            keywords: &["rodyklė", "rodyklė į kairę, užlenkta į dešinę"],
        },
        #[cfg(feature = "lv")]
        crate::Annotation {
            lang: "lv",
            tts: Some("pa labi vērsta izliekta bultiņa"),
            keywords: &["bultiņa", "pa labi vērsta izliekta bultiņa"],
        },
        #[cfg(feature = "mi")]
        crate::Annotation {
            lang: "mi",
            tts: Some("pere mauī piko matau"),
            keywords: &["pere", "pere mauī piko matau"],
        },
        #[cfg(feature = "mk")]
        crate::Annotation {
            lang: "mk",
            tts: Some("стрелка налево што врти десно"),
            keywords: &["десно", "лево", "стрелка", "стрелка налево што врти десно"],
        },
        #[cfg(feature = "ml")]
        crate::Annotation {
            lang: "ml",
            tts: Some("വലത\u{d4d}തേക\u{d4d}ക\u{d4d} വളഞ\u{d4d}ഞ ഇടത\u{d4d} അമ\u{d4d}പടയ\u{d3e}ളം"),
            keywords: &[
                "അമ\u{d4d}പടയ\u{d3e}ളം",
                "ഇടത\u{d4d}ത\u{d4d}",
                "വലത\u{d4d}തേക\u{d4d}ക\u{d4d} വളഞ\u{d4d}ഞ ഇടത\u{d4d} അമ\u{d4d}പടയ\u{d3e}ളം",
                "വളവ\u{d4d}",
            ],
        },
        #[cfg(feature = "mn")]
        crate::Annotation {
            lang: "mn",
            tts: Some("баруун эргэсэн зүүн сум"),
            keywords: &["баруун", "баруун эргэсэн зүүн сум", "зүүн", "сум", "эргэх"],
        },
        #[cfg(feature = "mr")]
        crate::Annotation {
            lang: "mr",
            tts: Some("डावा उजवीकड\u{947} वळल\u{947}ला बाण"),
            keywords: &[
                "डावा उजवीकड\u{947} वळल\u{947}ला बाण",
                "डावा-उजवीकड\u{947} वळल\u{947}ला बाण",
                "बाण",
            ],
        },
        #[cfg(feature = "ms")]
        crate::Annotation {
            lang: "ms",
            tts: Some("anak panah ke kiri melengkung ke kanan"),
            keywords: &["anak panah", "anak panah ke kiri melengkung ke kanan"],
        },
        #[cfg(feature = "mt")]
        crate::Annotation {
            lang: "mt",
            tts: Some("vleġġa tax-xellug mgħawġa lejn il-lemin"),
            keywords: &["vleġġa", "vleġġa tax-xellug mgħawġa lejn il-lemin"],
        },
        #[cfg(feature = "my")]
        crate::Annotation {
            lang: "my",
            tts: Some("ဘယ\u{103a}ည\u{103d}\u{103e}န\u{103a} ညာဝ\u{102d}\u{102f}က\u{103a} မြား"),
            keywords: &[
                "ညာက\u{103d}ေ\u{1037} ဘယ\u{103a}ဖက\u{103a} မြား",
                "ဘယ\u{103a}ည\u{103d}\u{103e}န\u{103a} ညာဝ\u{102d}\u{102f}က\u{103a} မြား",
                "မြား",
            ],
        },
        #[cfg(feature = "nb")]
        crate::Annotation {
            lang: "nb",
            tts: Some("pil som bøyer mot høyre"),
            keywords: &["pil", "pil som bøyer mot høyre"],
        },
        #[cfg(feature = "ne")]
        crate::Annotation {
            lang: "ne",
            tts: Some("दाया\u{901} मोड\u{947}को बाया\u{901} वाण"),
            keywords: &["दाया\u{901} मोड\u{947}को बाया\u{901} वाण", "वाण"],
        },
        #[cfg(feature = "nl")]
        crate::Annotation {
            lang: "nl",
            tts: Some("pijl naar links die naar rechts draait"),
            keywords: &["pijl", "pijl naar links die naar rechts draait"],
        },
        #[cfg(feature = "nn")]
        crate::Annotation {
            lang: "nn",
            tts: Some("pil som bøyer mot høgre"),
            keywords: &["pil", "pil som bøyer mot høgre"],
        },
        #[cfg(feature = "or")]
        crate::Annotation {
            lang: "or",
            tts: Some("ବ\u{b3e}ମ ତୀର ଡ\u{b3e}ହ\u{b3e}ଣକ\u{b41} ମୋଡ\u{b3c}\u{b3f} ହେଉଛ\u{b3f}"),
            keywords: &[
                "ତୀର",
                "ବ\u{b3e}ମ ତୀର ଡ\u{b3e}ହ\u{b3e}ଣକ\u{b41} ମୋଡ\u{b3c}\u{b3f} ହେଉଛ\u{b3f}",
            ],
        },
        #[cfg(feature = "pa")]
        crate::Annotation {
            lang: "pa",
            tts: Some("ਖ\u{a71}ਬ\u{a47}\u{a47} ਮ\u{a41}ੜਨ ਵਾਲਾ ਸ\u{a71}ਜਾ ਤੀਰ"),
            keywords: &[
                "ਖ\u{a71}ਬ\u{a47}\u{a47} ਮ\u{a41}ੜਨ ਵਾਲਾ ਸ\u{a71}ਜਾ ਤੀਰ",
                "ਤੀਰ",
            ],
        },
        #[cfg(feature = "pa_Arab")]
        crate::Annotation {
            lang: "pa_Arab",
            tts: Some("کھبا تیر سجے مڑدیاں ہوئیاں"),
            keywords: &["تیر", "کھبا تیر سجے مڑدیاں ہوئیاں"],
        },
        #[cfg(feature = "pcm")]
        crate::Annotation {
            lang: "pcm",
            tts: Some("Lẹ\u{301}ft Áro Kọ\u{301}v Gó Raít"),
            keywords: &[
                "Lẹ\u{301}ft Áro Kọ\u{301}v Gó Rait",
                "Lẹ\u{301}ft Áro Kọ\u{301}v Gó Raít",
                "Áro",
            ],
        },
        #[cfg(feature = "pl")]
        crate::Annotation {
            lang: "pl",
            tts: Some("strzałka zakręcona w prawo"),
            keywords: &["strzałka", "strzałka zakręcona w prawo", "zakręt", "zawróć"],
        },
        #[cfg(feature = "ps")]
        crate::Annotation {
            lang: "ps",
            tts: Some("چپ غشی ښۍ اړخ ته کوږ"),
            keywords: &["غشی", "چپ غشی ښۍ اړخ ته کوږ"],
        },
        #[cfg(feature = "pt")]
        crate::Annotation {
            lang: "pt",
            tts: Some("seta curva da esquerda para a direita"),
            keywords: &[
                "curva",
                "retorno",
                "seta",
                "seta curva da esquerda para a direita",
                "voltar à direita",
            ],
        },
        #[cfg(feature = "pt_PT")]
        crate::Annotation {
            lang: "pt_PT",
            tts: Some("seta para a esquerda com curva para a direita"),
            keywords: &[
                "curva",
                "direita",
                "seta",
                "seta para a esquerda com curva para a direita",
            ],
        },
        #[cfg(feature = "qu")]
        crate::Annotation {
            lang: "qu",
            tts: Some("lluq’i wach’i llink’uwan pañaman"),
            keywords: &["lluq’i wach’i llink’uwan pañaman"],
        },
        #[cfg(feature = "ro")]
        crate::Annotation {
            lang: "ro",
            tts: Some("săgeată stânga curbată spre dreapta"),
            keywords: &["săgeată", "săgeată stânga curbată spre dreapta"],
        },
        #[cfg(feature = "root")]
        crate::Annotation {
            lang: "root",
            tts: Some("E10-013"),
            keywords: &["E10-013"],
        },
        #[cfg(feature = "ru")]
        crate::Annotation {
            lang: "ru",
            tts: Some("стрелка влево с поворотом вправо"),
            keywords: &[
                "изгиб",
                "изогнутая стрелка",
                "поворот вправо",
                "стрелка влево",
                "стрелка влево с поворотом вправо",
            ],
        },
        #[cfg(feature = "rw")]
        crate::Annotation {
            lang: "rw",
            tts: Some("umwambi w’ibumoso uhetamiye iburyo"),
            keywords: &["umwambi", "umwambi w’ibumoso uhetamiye iburyo"],
        },
        #[cfg(feature = "sd")]
        crate::Annotation {
            lang: "sd",
            tts: Some("کاٻي تير مڙندي ساڄي طرف"),
            keywords: &["تير", "کاٻي تير مڙندي ساڄي طرف"],
        },
        #[cfg(feature = "si")]
        crate::Annotation {
            lang: "si",
            tts: Some("දක\u{dd4}ණට රව\u{dd4}ම\u{dca} ව\u{dd6} වම\u{dca} ඊතලය"),
            keywords: &[
                "ඊතලය",
                "දක\u{dd4}ණට රව\u{dd4}ම\u{dca} ව\u{dd6} වම\u{dca} ඊතලය",
            ],
        },
        #[cfg(feature = "sk")]
        crate::Annotation {
            lang: "sk",
            tts: Some("šípka doľava zakrivená doprava"),
            keywords: &["šípka", "šípka doľava zakrivená doprava"],
        },
        #[cfg(feature = "sl")]
        crate::Annotation {
            lang: "sl",
            tts: Some("puščica levo, ki se ukrivlja v desno"),
            keywords: &["puščica", "puščica levo, ki se ukrivlja v desno"],
        },
        #[cfg(feature = "so")]
        crate::Annotation {
            lang: "so",
            tts: Some("fallaarta bidix u qalloocsan midig"),
            keywords: &["fallaar", "fallaarta bidix u qalloocsan midig"],
        },
        #[cfg(feature = "sq")]
        crate::Annotation {
            lang: "sq",
            tts: Some("shigjeta majtas me hark djathtas"),
            keywords: &["shigjeta majtas me hark djathtas", "shigjetë"],
        },
        #[cfg(feature = "sr")]
        crate::Annotation {
            lang: "sr",
            tts: Some("стрелица за полукружни окрет улево"),
            keywords: &["стрeлицa", "стрелица за полукружни окрет улево"],
        },
        #[cfg(feature = "sr_Cyrl_BA")]
        crate::Annotation {
            lang: "sr_Cyrl_BA",
            tts: Some("стрелица за полукружни окрет улијево"),
            keywords: &["стрелица за полукружни окрет улијево"],
        },
        #[cfg(feature = "sr_Latn")]
        crate::Annotation {
            lang: "sr_Latn",
            tts: Some("strelica za polukružni okret ulevo"),
            keywords: &["strelica", "strelica za polukružni okret ulevo"],
        },
        #[cfg(feature = "sr_Latn_BA")]
        crate::Annotation {
            lang: "sr_Latn_BA",
            tts: Some("strelica za polukružni okret ulijevo"),
            keywords: &["strelica za polukružni okret ulijevo"],
        },
        #[cfg(feature = "sv")]
        crate::Annotation {
            lang: "sv",
            tts: Some("svängd högerpil"),
            keywords: &["höger", "pil", "sväng", "svängd högerpil"],
        },
        #[cfg(feature = "sw")]
        crate::Annotation {
            lang: "sw",
            tts: Some("mshale wa kushoto unaopinda kulia"),
            keywords: &["mshale", "mshale wa kushoto unaopinda kulia"],
        },
        #[cfg(feature = "sw_KE")]
        crate::Annotation {
            lang: "sw_KE",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "ta")]
        crate::Annotation {
            lang: "ta",
            tts: Some(
                "வலப\u{bcd}பக\u{bcd}கம\u{bcd} வளைந\u{bcd}திருக\u{bcd}கும\u{bcd} இடது அம\u{bcd}புக\u{bcd}குறி",
            ),
            keywords: &[
                "அம\u{bcd}புக\u{bcd}குறி",
                "வலப\u{bcd}பக\u{bcd}கம\u{bcd} வளைந\u{bcd}த இடது அம\u{bcd}புக\u{bcd}குறி",
                "வலப\u{bcd}பக\u{bcd}கம\u{bcd} வளைந\u{bcd}திருக\u{bcd}கும\u{bcd} இடது அம\u{bcd}புக\u{bcd}குறி",
            ],
        },
        #[cfg(feature = "te")]
        crate::Annotation {
            lang: "te",
            tts: Some("కుడ\u{c3f}వ\u{c48}పు వంపు త\u{c3f}ర\u{c3f}గ\u{c3f}న ఎడమ బ\u{c3e}ణం"),
            keywords: &[
                "ఎడమ",
                "కుడ\u{c3f}వ\u{c48}పు",
                "కుడ\u{c3f}వ\u{c48}పు వంపు త\u{c3f}ర\u{c3f}గ\u{c3f}న ఎడమ బ\u{c3e}ణం",
                "బ\u{c3e}ణం",
            ],
        },
        #[cfg(feature = "tg")]
        crate::Annotation {
            lang: "tg",
            tts: Some("тири чапи ба тарафи рост каҷшаванда"),
            keywords: &["тир", "тири чапи ба тарафи рост каҷшаванда"],
        },
        #[cfg(feature = "th")]
        crate::Annotation {
            lang: "th",
            tts: Some("ล\u{e39}กศรวนขวา"),
            keywords: &["ขวา", "ล\u{e39}กศร", "ล\u{e39}กศรวนขวา", "วน"],
        },
        #[cfg(feature = "ti")]
        crate::Annotation {
            lang: "ti",
            tts: Some("ጸጋማይ ምልክት ናብ የማን ዝተጠውየ"),
            keywords: &["ምልክት", "ጸጋማይ ምልክት ናብ የማን ዝተጠውየ"],
        },
        #[cfg(feature = "tk")]
        crate::Annotation {
            lang: "tk",
            tts: Some("çepe ok, saga egrelýän"),
            keywords: &["ok", "çepe ok, saga egrelýän"],
        },
        #[cfg(feature = "to")]
        crate::Annotation {
            lang: "to",
            tts: Some("ngahau afe ki toʻomataʻu"),
            keywords: &["afe", "mataʻu", "ngahau", "ngahau afe ki toʻomataʻu"],
        },
        #[cfg(feature = "tr")]
        crate::Annotation {
            lang: "tr",
            tts: Some("sağa kıvrımlı sol ok"),
            keywords: &["ok", "sağa kıvrımlı sol ok"],
        },
        #[cfg(feature = "ug")]
        crate::Annotation {
            lang: "ug",
            tts: Some("ئوڭغا ئېگىلگەن سول كۆرسەتكۈچ"),
            keywords: &["ئوڭغا ئېگىلگەن سول كۆرسەتكۈچ", "كۆرسەتكۈچ"],
        },
        #[cfg(feature = "uk")]
        crate::Annotation {
            lang: "uk",
            tts: Some("стрілка вліво з вигином управо"),
            keywords: &[
                "стрілка",
                "стрілка вліво з вигином управо",
                "стрілка ліворуч із поворотом праворуч",
            ],
        },
        #[cfg(feature = "ur")]
        crate::Annotation {
            lang: "ur",
            tts: Some("دائیں مڑتا ہوا بائیں تیر"),
            keywords: &["تیر", "دائیں مڑتا ہوا بائیں تیر", "سائن", "سمت"],
        },
        #[cfg(feature = "uz")]
        crate::Annotation {
            lang: "uz",
            tts: Some("chapdan o‘ngga qayrilish"),
            keywords: &["chapdan o‘ngga qayrilish", "strelka"],
        },
        #[cfg(feature = "vi")]
        crate::Annotation {
            lang: "vi",
            tts: Some("mũi tên trái cong sang phải"),
            keywords: &["mũi tên", "mũi tên trái cong sang phải"],
        },
        #[cfg(feature = "wo")]
        crate::Annotation {
            lang: "wo",
            tts: Some("fettu càmmoñ lemoo ndijoor"),
            keywords: &["fett", "fettu càmmoñ lemoo ndijoor"],
        },
        #[cfg(feature = "xh")]
        crate::Annotation {
            lang: "xh",
            tts: Some("utolo olusekhohlo olugobe ekunene"),
            keywords: &["utolo", "utolo olusekhohlo olugobe ekunene"],
        },
        #[cfg(feature = "yo")]
        crate::Annotation {
            lang: "yo",
            tts: Some("àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá òsì onígun"),
            keywords: &[
                "àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá òsì onígun",
                "ọfà",
            ],
        },
        #[cfg(feature = "yue")]
        crate::Annotation {
            lang: "yue",
            tts: Some("向左箭咀彎向右"),
            keywords: &["向左箭咀彎向右", "箭咀"],
        },
        #[cfg(feature = "yue_Hans")]
        crate::Annotation {
            lang: "yue_Hans",
            tts: Some("向左箭咀弯向右"),
            keywords: &["向左箭咀弯向右", "箭咀"],
        },
        #[cfg(feature = "zh")]
        crate::Annotation {
            lang: "zh",
            tts: Some("左转弯箭头"),
            keywords: &["向右弯曲的左箭头", "左转弯", "左转弯箭头", "箭头"],
        },
        #[cfg(feature = "zh_Hant")]
        crate::Annotation {
            lang: "zh_Hant",
            tts: Some("向右彎的左箭頭"),
            keywords: &["向右彎的左箭頭"],
        },
        #[cfg(feature = "zh_Hant_HK")]
        crate::Annotation {
            lang: "zh_Hant_HK",
            tts: Some("向右彎嘅左箭嘴"),
            keywords: &["向右彎嘅左箭嘴"],
        },
        #[cfg(feature = "zu")]
        crate::Annotation {
            lang: "zu",
            tts: Some("umcibisholo okhombe kwesokunxele ogobe kwesokudla"),
            keywords: &[
                "umcibisholo",
                "umcibisholo okhombe kwesokunxele ogobe kwesokudla",
            ],
        },
    ],
};
// Auto-generated emoji metadata for "right arrow curving up" (U+2934 U+FE0F).
// The fully-qualified form carries the VS16 presentation selector (\u{fe0f});
// the bare-codepoint form is listed under `variants` with Status::Unqualified.
// Data originates from the Unicode CLDR annotation files — do not hand-edit
// the locale strings; fixes belong in the generator / upstream CLDR.
#[doc = "⤴\u{fe0f}"]
pub const RIGHT_ARROW_CURVING_UP: crate::Emoji = crate::Emoji {
    glyph: "⤴\u{fe0f}",
    codepoint: "2934 FE0F",
    status: crate::Status::FullyQualified,
    introduction_version: 0.6f32,
    name: "right arrow curving up",
    group: "Symbols",
    subgroup: "arrow",
    is_variant: false,
    // Unqualified variant: same glyph without the VS16 presentation selector.
    variants: &[crate::Emoji {
        glyph: "⤴",
        codepoint: "2934",
        status: crate::Status::Unqualified,
        introduction_version: 0.6f32,
        name: "right arrow curving up",
        group: "Symbols",
        subgroup: "arrow",
        is_variant: true,
        variants: &[],
        annotations: &[],
    }],
    // Per-locale CLDR annotations (tts = text-to-speech name). Each entry is
    // compiled in only when the matching locale feature flag is enabled.
    annotations: &[
        #[cfg(feature = "af")]
        crate::Annotation {
            lang: "af",
            tts: Some("pyl na regs buig op"),
            keywords: &["pyl", "pyl na regs buig op"],
        },
        #[cfg(feature = "am")]
        crate::Annotation {
            lang: "am",
            tts: Some("ወደ ላይ ታጣፊ ቀኝ ጠቋሚ ቀስት"),
            keywords: &["ቀስት", "ወደ ላይ ታጣፊ ቀኝ ጠቋሚ ቀስት"],
        },
        #[cfg(feature = "ar")]
        crate::Annotation {
            lang: "ar",
            tts: Some("سهم لأعلى من اليسار"),
            keywords: &["سهم", "سهم لأعلى من اليسار"],
        },
        #[cfg(feature = "as")]
        crate::Annotation {
            lang: "as",
            tts: Some(
                "ওপৰলৈ ভ\u{9be}\u{981}জ লোৱ\u{9be} সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
            ),
            keywords: &[
                "ওপৰলৈ ভ\u{9be}\u{981}জ লোৱ\u{9be} সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
                "ক\u{9be}\u{981}ড\u{9bc}",
            ],
        },
        #[cfg(feature = "az")]
        crate::Annotation {
            lang: "az",
            tts: Some("yuxarı dönən sağ ox"),
            keywords: &["ox", "yuxarı dönən sağ ox"],
        },
        #[cfg(feature = "be")]
        crate::Annotation {
            lang: "be",
            tts: Some("стрэлка ўправа з паваротам уверх"),
            keywords: &[
                "кірунак",
                "стрэлка",
                "стрэлка ўправа з паваротам уверх",
                "уверх",
            ],
        },
        #[cfg(feature = "bg")]
        crate::Annotation {
            lang: "bg",
            tts: Some("извита стрелка надясно и нагоре"),
            keywords: &["извита стрелка надясно и нагоре", "стрелка"],
        },
        #[cfg(feature = "bn")]
        crate::Annotation {
            lang: "bn",
            tts: Some("ড\u{9be}ন তীর উপরের দিকে ব\u{9be}\u{981}ক\u{9be}নো"),
            keywords: &["ড\u{9be}ন তীর উপরের দিকে ব\u{9be}\u{981}ক\u{9be}নো", "তীর"],
        },
        #[cfg(feature = "bs")]
        crate::Annotation {
            lang: "bs",
            tts: Some("strelica desno zakrivljena prema gore"),
            keywords: &["strelica", "strelica desno zakrivljena prema gore"],
        },
        #[cfg(feature = "ca")]
        crate::Annotation {
            lang: "ca",
            tts: Some("fletxa dreta que gira cap amunt"),
            keywords: &[
                "amunt",
                "dreta",
                "fletxa",
                "fletxa dreta que gira cap amunt",
                "gir",
            ],
        },
        #[cfg(feature = "chr")]
        crate::Annotation {
            lang: "chr",
            tts: Some("ᎠᎦᏘᏏ ᎦᏝᏗ ᎠᏕᏲ ᎦᎸᎳᏗᎠᎦᏘ"),
            keywords: &["ᎠᎦᏘᏏ ᎦᏝᏗ ᎠᏕᏲ ᎦᎸᎳᏗᎠᎦᏘ", "ᎦᏝᏗ"],
        },
        #[cfg(feature = "cs")]
        crate::Annotation {
            lang: "cs",
            tts: Some("šipka doprava stáčející se nahoru"),
            keywords: &["šipka", "šipka doprava stáčející se nahoru"],
        },
        #[cfg(feature = "cy")]
        crate::Annotation {
            lang: "cy",
            tts: Some("saeth i’r dde yn troi i fyny"),
            keywords: &["saeth", "saeth i’r dde yn troi i fyny"],
        },
        #[cfg(feature = "da")]
        crate::Annotation {
            lang: "da",
            tts: Some("pil mod højre med sving opad"),
            keywords: &["pil", "pil mod højre med sving opad"],
        },
        #[cfg(feature = "de")]
        crate::Annotation {
            lang: "de",
            tts: Some("geschwungener Pfeil nach oben"),
            keywords: &[
                "Pfeil",
                "geschwungen",
                "geschwungener Pfeil nach oben",
                "nach oben",
                "oben",
            ],
        },
        #[cfg(feature = "el")]
        crate::Annotation {
            lang: "el",
            tts: Some("δεξιό βέλος που στρίβει πάνω"),
            keywords: &["βέλος", "δεξιό βέλος που στρίβει πάνω"],
        },
        #[cfg(feature = "en")]
        crate::Annotation {
            lang: "en",
            tts: Some("right arrow curving up"),
            keywords: &["arrow", "right arrow curving up"],
        },
        // NOTE(review): "↑↑↑" appears to be a CLDR placeholder used when a
        // regional locale has no translation of its own and inherits from the
        // parent locale — confirm against the CLDR annotation source.
        #[cfg(feature = "en_AU")]
        crate::Annotation {
            lang: "en_AU",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_CA")]
        crate::Annotation {
            lang: "en_CA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_GB")]
        crate::Annotation {
            lang: "en_GB",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_IN")]
        crate::Annotation {
            lang: "en_IN",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es")]
        crate::Annotation {
            lang: "es",
            tts: Some("flecha derecha curvándose hacia arriba"),
            keywords: &[
                "arriba",
                "curva",
                "dirección",
                "flecha",
                "flecha derecha curvándose hacia arriba",
            ],
        },
        #[cfg(feature = "es_419")]
        crate::Annotation {
            lang: "es_419",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_MX")]
        crate::Annotation {
            lang: "es_MX",
            tts: Some("flecha de retorno al norte por la izquierda"),
            keywords: &["flecha de retorno al norte por la izquierda"],
        },
        #[cfg(feature = "es_US")]
        crate::Annotation {
            lang: "es_US",
            tts: Some("↑↑↑"),
            keywords: &["flecha", "flecha derecha curvándose hacia arriba"],
        },
        #[cfg(feature = "et")]
        crate::Annotation {
            lang: "et",
            tts: Some("kaardus nool üles"),
            keywords: &["kaardus nool üles", "nool", "üles"],
        },
        #[cfg(feature = "eu")]
        crate::Annotation {
            lang: "eu",
            tts: Some("gora okertzen den eskuinera gezia"),
            keywords: &["gezi", "gora okertzen den eskuinera gezia"],
        },
        #[cfg(feature = "fa")]
        crate::Annotation {
            lang: "fa",
            tts: Some("پیکان خمیده بالا راست"),
            keywords: &["پیکان", "پیکان خمیده بالا راست"],
        },
        #[cfg(feature = "fi")]
        crate::Annotation {
            lang: "fi",
            tts: Some("ylös kääntyvä nuoli"),
            keywords: &["nuoli", "ylös", "ylös kääntyvä nuoli"],
        },
        #[cfg(feature = "fil")]
        crate::Annotation {
            lang: "fil",
            tts: Some("pakanang arrow na kumurba pataas"),
            keywords: &[
                "arrow",
                "direksyon",
                "kurba",
                "pakaliwa",
                "pakanang arrow na kumurba pataas",
                "pataas",
            ],
        },
        #[cfg(feature = "fo")]
        crate::Annotation {
            lang: "fo",
            tts: Some("bogin pílur sum peikar uppeftir"),
            keywords: &["bogin", "bogin pílur sum peikar uppeftir", "pílur"],
        },
        #[cfg(feature = "fr")]
        crate::Annotation {
            lang: "fr",
            tts: Some("flèche courbe haut"),
            keywords: &["flèche", "flèche courbe haut"],
        },
        #[cfg(feature = "fr_CA")]
        crate::Annotation {
            lang: "fr_CA",
            tts: Some("flèche courbe vers le haut"),
            keywords: &[
                "courbe vers le haut",
                "flèche",
                "flèche courbe vers le haut",
            ],
        },
        #[cfg(feature = "ga")]
        crate::Annotation {
            lang: "ga",
            tts: Some("saighead dheas ag dul suas"),
            keywords: &["saighead", "saighead dheas ag dul suas"],
        },
        #[cfg(feature = "gd")]
        crate::Annotation {
            lang: "gd",
            tts: Some("saighead dheas a’ lùbadh suas"),
            keywords: &["saighead", "saighead dheas a’ lùbadh suas"],
        },
        #[cfg(feature = "gl")]
        crate::Annotation {
            lang: "gl",
            tts: Some("frecha curvada cara arriba"),
            keywords: &["arriba", "curva", "frecha", "frecha curvada cara arriba"],
        },
        #[cfg(feature = "gu")]
        crate::Annotation {
            lang: "gu",
            tts: Some("ઉપર વળત\u{ac1}\u{a82} જમણ\u{ac1}\u{a82} તીર"),
            keywords: &["ઉપર વળત\u{ac1}\u{a82} જમણ\u{ac1}\u{a82} તીર", "તીર"],
        },
        #[cfg(feature = "ha")]
        crate::Annotation {
            lang: "ha",
            tts: Some("kibiyar dama mai lanƙwasa ta sama"),
            keywords: &["kibiya", "kibiyar dama mai lanƙwasa ta sama"],
        },
        #[cfg(feature = "he")]
        crate::Annotation {
            lang: "he",
            tts: Some("חץ ימינה מתעקל מעלה"),
            keywords: &["חץ", "חץ ימינה מתעקל מעלה", "למעלה", "מתעקל"],
        },
        #[cfg(feature = "hi")]
        crate::Annotation {
            lang: "hi",
            tts: Some("ऊपर की ओर म\u{941}ड\u{93c}ा दाया\u{901} तीर"),
            keywords: &["ऊपर की ओर म\u{941}ड\u{93c}ा दाया\u{901} तीर", "तीर"],
        },
        #[cfg(feature = "hr")]
        crate::Annotation {
            lang: "hr",
            tts: Some("desna strelica koja se zakrivljuje prema gore"),
            keywords: &["desna strelica koja se zakrivljuje prema gore", "strelica"],
        },
        #[cfg(feature = "hu")]
        crate::Annotation {
            lang: "hu",
            tts: Some("felfelé görbülő jobb nyíl"),
            keywords: &["felfelé görbülő jobb nyíl", "nyíl"],
        },
        #[cfg(feature = "hy")]
        crate::Annotation {
            lang: "hy",
            tts: Some("ձախից վերև թեքվող սլաք"),
            keywords: &["ձախից վերև թեքվող սլաք", "սլաք"],
        },
        #[cfg(feature = "id")]
        crate::Annotation {
            lang: "id",
            tts: Some("tanda panah kanan melengkung ke atas"),
            keywords: &["panah", "tanda panah kanan melengkung ke atas"],
        },
        #[cfg(feature = "ig")]
        crate::Annotation {
            lang: "ig",
            tts: Some("ube-akanri na-eme mgbagọ dị elu"),
            keywords: &["ube", "ube-akanri na-eme mgbagọ dị elu"],
        },
        #[cfg(feature = "is")]
        crate::Annotation {
            lang: "is",
            tts: Some("sveig ör upp"),
            keywords: &["sveig ör upp", "ör"],
        },
        #[cfg(feature = "it")]
        crate::Annotation {
            lang: "it",
            tts: Some("freccia curva in alto"),
            keywords: &["curva", "freccia", "freccia curva in alto", "verso l’alto"],
        },
        #[cfg(feature = "ja")]
        crate::Annotation {
            lang: "ja",
            tts: Some("上カーブ矢印"),
            keywords: &["カーブ", "上", "上カーブ矢印", "曲線", "矢印"],
        },
        #[cfg(feature = "jv")]
        crate::Annotation {
            lang: "jv",
            tts: Some("panah nengen menggok munggah"),
            keywords: &["panah", "panah nengen menggok munggah"],
        },
        #[cfg(feature = "ka")]
        crate::Annotation {
            lang: "ka",
            tts: Some("ისარი, რომელიც იმრუდება მარჯვნიდან ზემოთ"),
            keywords: &["ისარი", "ისარი, რომელიც იმრუდება მარჯვნიდან ზემოთ"],
        },
        #[cfg(feature = "kab")]
        crate::Annotation {
            lang: "kab",
            tts: Some("aneccab ayeffus yettin d asawen"),
            keywords: &["aneccab ayeffus yettin d asawen"],
        },
        #[cfg(feature = "kk")]
        crate::Annotation {
            lang: "kk",
            tts: Some("оң жақтан шығып, жоғарыға бұрылатын көрсеткі"),
            keywords: &["көрсеткі", "оң жақтан шығып, жоғарыға бұрылатын көрсеткі"],
        },
        #[cfg(feature = "kl")]
        crate::Annotation {
            lang: "kl",
            tts: Some("pil mod højre med sving opad"),
            keywords: &["pil", "pil mod højre med sving opad"],
        },
        #[cfg(feature = "km")]
        crate::Annotation {
            lang: "km",
            tts: Some(
                "ព\u{17d2}រ\u{17bd}ញកោងព\u{17b8}ក\u{17d2}រោមទៅលើក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ),
            keywords: &[
                "ទ\u{17b7}ស",
                "ទ\u{17b7}សដៅ",
                "ព\u{17d2}រ\u{17bd}ញ",
                "ព\u{17d2}រ\u{17bd}ញកោងព\u{17b8}ក\u{17d2}រោមទៅលើក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}នជ\u{17d2}រ\u{17bb}ង",
            ],
        },
        #[cfg(feature = "kn")]
        crate::Annotation {
            lang: "kn",
            tts: Some("ಮೇಲಕ\u{ccd}ಕ\u{cc6} ತ\u{cbf}ರುಗುವ ಬಲ ಬಾಣ"),
            keywords: &["ಬಾಣ", "ಮೇಲಕ\u{ccd}ಕ\u{cc6} ತ\u{cbf}ರುಗುವ ಬಲ ಬಾಣ"],
        },
        #[cfg(feature = "ko")]
        crate::Annotation {
            lang: "ko",
            tts: Some("위쪽으로 꺾어지는 우향 화살표"),
            keywords: &["위쪽으로 꺾어지는 우향 화살표", "좌회전", "화살표"],
        },
        #[cfg(feature = "kok")]
        crate::Annotation {
            lang: "kok",
            tts: Some("उजवो बाण वयर वळा"),
            keywords: &["उजवो बाण वयर वळा", "बाण"],
        },
        #[cfg(feature = "ky")]
        crate::Annotation {
            lang: "ky",
            tts: Some("оңдон өйдө ийилген жебе"),
            keywords: &["жебе", "оңдон өйдө ийилген жебе"],
        },
        #[cfg(feature = "lb")]
        crate::Annotation {
            lang: "lb",
            tts: Some("Rietsfeil mat Kéier no uewen"),
            keywords: &["Feiler", "Rietsfeil mat Kéier no uewen"],
        },
        #[cfg(feature = "lo")]
        crate::Annotation {
            lang: "lo",
            tts: Some("ລ\u{eb9}ກສອນໂຄ\u{ec9}ງຂວາຂ\u{eb6}\u{ec9}ນເທ\u{eb4}ງ"),
            keywords: &[
                "ຂວາ",
                "ຂ\u{eb6}\u{ec9}ນເທ\u{eb4}ງ",
                "ລ\u{eb9}ກສອນ",
                "ລ\u{eb9}ກສອນໂຄ\u{ec9}ງຂວາຂ\u{eb6}\u{ec9}ນເທ\u{eb4}ງ",
                "ໂຄ\u{ec9}ງ",
            ],
        },
        #[cfg(feature = "lt")]
        crate::Annotation {
            lang: "lt",
            tts: Some("rodyklė į dešinę, užlenkta į viršų"),
            keywords: &["rodyklė", "rodyklė į dešinę, užlenkta į viršų"],
        },
        #[cfg(feature = "lv")]
        crate::Annotation {
            lang: "lv",
            tts: Some("augšup vērsta izliekta bultiņa"),
            keywords: &["augšup vērsta izliekta bultiņa", "bultiņa"],
        },
        #[cfg(feature = "mi")]
        crate::Annotation {
            lang: "mi",
            tts: Some("pere matau piko ake"),
            keywords: &["pere", "pere matau piko ake"],
        },
        #[cfg(feature = "mk")]
        crate::Annotation {
            lang: "mk",
            tts: Some("десна стрелка што врти нагоре"),
            keywords: &["горе", "десна стрелка што врти нагоре", "десно", "стрелка"],
        },
        #[cfg(feature = "ml")]
        crate::Annotation {
            lang: "ml",
            tts: Some("മ\u{d41}കളിലേക\u{d4d}ക\u{d4d} വളഞ\u{d4d}ഞ വലത\u{d4d} അമ\u{d4d}പടയ\u{d3e}ളം"),
            keywords: &[
                "അമ\u{d4d}പടയ\u{d3e}ളം",
                "മ\u{d41}കളിലേക\u{d4d}ക\u{d4d} വളഞ\u{d4d}ഞ വലത\u{d4d} അമ\u{d4d}പടയ\u{d3e}ളം",
                "മ\u{d41}\u{d41}കളിലേക\u{d4d}ക\u{d4d}",
                "വളവ\u{d4d}",
            ],
        },
        #[cfg(feature = "mn")]
        crate::Annotation {
            lang: "mn",
            tts: Some("дээшээ заасан баруун сум"),
            keywords: &["баруун", "дээш", "дээшээ заасан баруун сум", "заах", "сум"],
        },
        #[cfg(feature = "mr")]
        crate::Annotation {
            lang: "mr",
            tts: Some("उजवा वर वळल\u{947}ला बाण"),
            keywords: &["उजवा वर वळल\u{947}ला बाण", "बाण"],
        },
        #[cfg(feature = "ms")]
        crate::Annotation {
            lang: "ms",
            tts: Some("anak panah ke kanan melengkung ke atas"),
            keywords: &["anak panah", "anak panah ke kanan melengkung ke atas"],
        },
        #[cfg(feature = "mt")]
        crate::Annotation {
            lang: "mt",
            tts: Some("vleġġa leminija mgħawġa ’l fuq"),
            keywords: &["vleġġa", "vleġġa leminija mgħawġa ’l fuq"],
        },
        #[cfg(feature = "my")]
        crate::Annotation {
            lang: "my",
            tts: Some("ညာည\u{103d}\u{103e}န\u{103a} အပေါ\u{103a}ဝ\u{102d}\u{102f}က\u{103a} မြား"),
            keywords: &[
                "ညာည\u{103d}\u{103e}န\u{103a} အပေါ\u{103a}ဝ\u{102d}\u{102f}က\u{103a} မြား",
                "မြား",
                "အပေါ\u{103a}က\u{103d}ေ\u{1037} ညာ မြား",
            ],
        },
        #[cfg(feature = "nb")]
        crate::Annotation {
            lang: "nb",
            tts: Some("pil som bøyer opp"),
            keywords: &["pil", "pil som bøyer opp"],
        },
        #[cfg(feature = "ne")]
        crate::Annotation {
            lang: "ne",
            tts: Some("माथि मोड\u{947}को दाया\u{901} वाण"),
            keywords: &["माथि मोड\u{947}को दाया\u{901} वाण", "वाण"],
        },
        #[cfg(feature = "nl")]
        crate::Annotation {
            lang: "nl",
            tts: Some("pijl naar rechts die omhoog draait"),
            keywords: &["pijl", "pijl naar rechts die omhoog draait"],
        },
        #[cfg(feature = "nn")]
        crate::Annotation {
            lang: "nn",
            tts: Some("pil som bøyer opp"),
            keywords: &["pil", "pil som bøyer opp"],
        },
        #[cfg(feature = "or")]
        crate::Annotation {
            lang: "or",
            tts: Some("ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର ଉପରକ\u{b41} ମୋଡ\u{b3c}\u{b3f} ହେଉଛ\u{b3f}"),
            keywords: &[
                "ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର ଉପରକ\u{b41} ମୋଡ\u{b3c}\u{b3f} ହେଉଛ\u{b3f}",
                "ତୀର",
            ],
        },
        #[cfg(feature = "pa")]
        crate::Annotation {
            lang: "pa",
            tts: Some("ਉ\u{a71}ਪਰ ਮ\u{a41}ੜਨ ਵਾਲਾ ਸ\u{a71}ਜਾ ਤੀਰ"),
            keywords: &["ਉ\u{a71}ਪਰ ਮ\u{a41}ੜਨ ਵਾਲਾ ਸ\u{a71}ਜਾ ਤੀਰ", "ਤੀਰ"],
        },
        #[cfg(feature = "pa_Arab")]
        crate::Annotation {
            lang: "pa_Arab",
            tts: Some("سجا تیر ا\u{64f}وتے مڑدیاں ہوئیاں"),
            keywords: &["تیر", "سجا تیر ا\u{64f}وتے مڑدیاں ہوئیاں"],
        },
        #[cfg(feature = "pcm")]
        crate::Annotation {
            lang: "pcm",
            tts: Some("Raít Áro Kọ\u{301}v Gó Ọp"),
            keywords: &["Raít Áro Kọ\u{301}v Gó Ọp", "Áro"],
        },
        #[cfg(feature = "pl")]
        crate::Annotation {
            lang: "pl",
            tts: Some("strzałka w prawo skręcająca w górę"),
            keywords: &["strzałka", "strzałka w prawo skręcająca w górę", "zakręt"],
        },
        #[cfg(feature = "ps")]
        crate::Annotation {
            lang: "ps",
            tts: Some("ښۍ غشی پورته کوږ"),
            keywords: &["غشی", "ښۍ غشی پورته ته کوږ", "ښۍ غشی پورته کوږ"],
        },
        #[cfg(feature = "pt")]
        crate::Annotation {
            lang: "pt",
            tts: Some("seta para a direita curvada para cima"),
            keywords: &[
                "curva",
                "seta",
                "seta curva de baixo para cima",
                "seta para a direita curvada para cima",
                "seta para cima",
            ],
        },
        #[cfg(feature = "pt_PT")]
        crate::Annotation {
            lang: "pt_PT",
            tts: Some("seta para a direita com curva para cima"),
            keywords: &[
                "curva",
                "para cima",
                "seta",
                "seta para a direita com curva para cima",
            ],
        },
        #[cfg(feature = "qu")]
        crate::Annotation {
            lang: "qu",
            tts: Some("paña wach’i llink’uwan hanaqman"),
            keywords: &["paña wach’i llink’uwan hanaqman"],
        },
        #[cfg(feature = "ro")]
        crate::Annotation {
            lang: "ro",
            tts: Some("săgeată dreapta curbată în sus"),
            keywords: &["săgeată", "săgeată dreapta curbată în sus"],
        },
        // CLDR "root" locale: carries the internal CLDR short identifier
        // (E-number) rather than a human-language translation.
        #[cfg(feature = "root")]
        crate::Annotation {
            lang: "root",
            tts: Some("E10-154"),
            keywords: &["E10-154"],
        },
        #[cfg(feature = "ru")]
        crate::Annotation {
            lang: "ru",
            tts: Some("стрелка вправо с поворотом вверх"),
            keywords: &[
                "изгиб",
                "изогнутая стрелка",
                "поворот вверх",
                "стрелка вправо",
                "стрелка вправо с поворотом вверх",
            ],
        },
        #[cfg(feature = "rw")]
        crate::Annotation {
            lang: "rw",
            tts: Some("umwambi w’iburyo uhetamiye hejuru"),
            keywords: &["umwambi", "umwambi w’iburyo uhetamiye hejuru"],
        },
        #[cfg(feature = "sd")]
        crate::Annotation {
            lang: "sd",
            tts: Some("ساڄي تير مڙندي مٿين طرف"),
            keywords: &["تير", "ساڄي تير مڙندي مٿين طرف"],
        },
        #[cfg(feature = "si")]
        crate::Annotation {
            lang: "si",
            tts: Some("ඉහළට රව\u{dd4}ම\u{dca} ව\u{dd6} දක\u{dd4}ණ\u{dd4} ඊතලය"),
            keywords: &[
                "ඉහළට රව\u{dd4}ම\u{dca} ව\u{dd6} දක\u{dd4}ණ\u{dd4} ඊතලය",
                "ඊතලය",
            ],
        },
        #[cfg(feature = "sk")]
        crate::Annotation {
            lang: "sk",
            tts: Some("šípka doprava zakrivená nahor"),
            keywords: &["šípka", "šípka doprava zakrivená nahor"],
        },
        #[cfg(feature = "sl")]
        crate::Annotation {
            lang: "sl",
            tts: Some("puščica desno, ki se ukrivlja gor"),
            keywords: &["puščica", "puščica desno, ki se ukrivlja gor"],
        },
        #[cfg(feature = "so")]
        crate::Annotation {
            lang: "so",
            tts: Some("fallaarta midig u qalloocsan kor"),
            keywords: &["fallaar", "fallaarta midig u qalloocsan kor"],
        },
        #[cfg(feature = "sq")]
        crate::Annotation {
            lang: "sq",
            tts: Some("shigjetë djathtas e përkulur lart"),
            keywords: &["shigjetë", "shigjetë djathtas e përkulur lart"],
        },
        #[cfg(feature = "sr")]
        crate::Annotation {
            lang: "sr",
            tts: Some("стрелица која скреће нагоре"),
            keywords: &["стрeлицa", "стрелица која скреће нагоре"],
        },
        #[cfg(feature = "sr_Cyrl_BA")]
        crate::Annotation {
            lang: "sr_Cyrl_BA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "sr_Latn")]
        crate::Annotation {
            lang: "sr_Latn",
            tts: Some("strelica koja skreće nagore"),
            keywords: &["strelica", "strelica koja skreće nagore"],
        },
        #[cfg(feature = "sv")]
        crate::Annotation {
            lang: "sv",
            tts: Some("svängd uppåtpil"),
            keywords: &["pil", "sväng", "svängd uppåtpil", "uppåt"],
        },
        #[cfg(feature = "sw")]
        crate::Annotation {
            lang: "sw",
            tts: Some("mshale wa kulia unaopinda juu"),
            keywords: &["mshale", "mshale wa kulia unaopinda juu"],
        },
        #[cfg(feature = "sw_KE")]
        crate::Annotation {
            lang: "sw_KE",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "ta")]
        crate::Annotation {
            lang: "ta",
            tts: Some(
                "மேல\u{bcd} நோக\u{bcd}கி வளைந\u{bcd}திருக\u{bcd}கும\u{bcd} வலது அம\u{bcd}புக\u{bcd}குறி",
            ),
            keywords: &[
                "அம\u{bcd}புக\u{bcd}குறி",
                "மேல\u{bcd} நோக\u{bcd}கி வளைந\u{bcd}த வலது அம\u{bcd}புக\u{bcd}குறி",
                "மேல\u{bcd} நோக\u{bcd}கி வளைந\u{bcd}திருக\u{bcd}கும\u{bcd} வலது அம\u{bcd}புக\u{bcd}குறி",
            ],
        },
        #[cfg(feature = "te")]
        crate::Annotation {
            lang: "te",
            tts: Some("ప\u{c48}క\u{c3f} వంపు త\u{c3f}ర\u{c3f}గ\u{c3f}న కుడ\u{c3f} బ\u{c3e}ణం"),
            keywords: &[
                "కుడ\u{c3f}",
                "ప\u{c48}క\u{c3f}",
                "ప\u{c48}క\u{c3f} వంపు త\u{c3f}ర\u{c3f}గ\u{c3f}న కుడ\u{c3f} బ\u{c3e}ణం",
                "బ\u{c3e}ణం",
            ],
        },
        #[cfg(feature = "tg")]
        crate::Annotation {
            lang: "tg",
            tts: Some("тири рости ба тарафи боло каҷшаванда"),
            keywords: &["тир", "тири рости ба тарафи боло каҷшаванда"],
        },
        #[cfg(feature = "th")]
        crate::Annotation {
            lang: "th",
            tts: Some("ล\u{e39}กศรโค\u{e49}งข\u{e36}\u{e49}น"),
            keywords: &[
                "ข\u{e35}\u{e49}น",
                "ล\u{e39}กศร",
                "ล\u{e39}กศรโค\u{e49}งข\u{e36}\u{e49}น",
            ],
        },
        #[cfg(feature = "ti")]
        crate::Annotation {
            lang: "ti",
            tts: Some("የማናይ ምልክት ናብ ላዕሊ ዝተጠውየ"),
            keywords: &["ምልክት", "የማናይ ምልክት ናብ ላዕሊ ዝተጠውየ"],
        },
        #[cfg(feature = "tk")]
        crate::Annotation {
            lang: "tk",
            tts: Some("saga ok, ýokary egrelýän"),
            keywords: &["ok", "saga ok, ýokary egrelýän"],
        },
        #[cfg(feature = "to")]
        crate::Annotation {
            lang: "to",
            tts: Some("ngahau afe ki ʻolunga"),
            keywords: &["afe", "hake", "ngahau", "ngahau afe ki ʻolunga", "ʻolunga"],
        },
        #[cfg(feature = "tr")]
        crate::Annotation {
            lang: "tr",
            tts: Some("yukarı kıvrımlı sağ ok"),
            keywords: &["ok", "yukarı kıvrımlı sağ ok"],
        },
        #[cfg(feature = "ug")]
        crate::Annotation {
            lang: "ug",
            tts: Some("ئۈستىگە ئېگىلگەن ئوڭ كۆرسەتكۈچ"),
            keywords: &["ئۈستىگە ئېگىلگەن ئوڭ كۆرسەتكۈچ", "كۆرسەتكۈچ"],
        },
        #[cfg(feature = "uk")]
        crate::Annotation {
            lang: "uk",
            tts: Some("стрілка вправо з вигином угору"),
            keywords: &[
                "стрілка",
                "стрілка вправо з вигином угору",
                "стрілка праворуч із поворотом угору",
            ],
        },
        #[cfg(feature = "ur")]
        crate::Annotation {
            lang: "ur",
            tts: Some("اوپر مڑتا ہوا دائیں تیر"),
            keywords: &["اوپر مڑتا ہوا دائیں تیر", "تیر", "سائن", "سمت"],
        },
        #[cfg(feature = "uz")]
        crate::Annotation {
            lang: "uz",
            tts: Some("o‘ngdan yuqoriga qayrilish"),
            keywords: &["o‘ngdan yuqoriga qayrilish", "strelka"],
        },
        #[cfg(feature = "vi")]
        crate::Annotation {
            lang: "vi",
            tts: Some("mũi tên phải cong lên"),
            keywords: &["mũi tên", "mũi tên phải cong lên"],
        },
        #[cfg(feature = "wo")]
        crate::Annotation {
            lang: "wo",
            tts: Some("fettu ndijoor lemoo kaw"),
            keywords: &["fett", "fettu ndijoor lemoo kaw"],
        },
        #[cfg(feature = "xh")]
        crate::Annotation {
            lang: "xh",
            tts: Some("utolo olusekunene olugobe phezulu"),
            keywords: &["utolo", "utolo olusekunene olugobe phezulu"],
        },
        #[cfg(feature = "yo")]
        crate::Annotation {
            lang: "yo",
            tts: Some("àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún onígun òkè"),
            keywords: &[
                "àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún onígun òkè",
                "ọfà",
            ],
        },
        #[cfg(feature = "yue")]
        crate::Annotation {
            lang: "yue",
            tts: Some("向右箭咀彎向上"),
            keywords: &["向右箭咀彎向上", "箭咀"],
        },
        #[cfg(feature = "yue_Hans")]
        crate::Annotation {
            lang: "yue_Hans",
            tts: Some("向右箭咀弯向上"),
            keywords: &["向右箭咀弯向上", "箭咀"],
        },
        #[cfg(feature = "zh")]
        crate::Annotation {
            lang: "zh",
            tts: Some("右上弯箭头"),
            keywords: &["右上弯", "右上弯箭头", "向上弯曲的右箭头", "箭头"],
        },
        #[cfg(feature = "zh_Hant")]
        crate::Annotation {
            lang: "zh_Hant",
            tts: Some("右上旋轉箭頭"),
            keywords: &["右上旋轉箭頭"],
        },
        #[cfg(feature = "zh_Hant_HK")]
        crate::Annotation {
            lang: "zh_Hant_HK",
            tts: Some("向上彎嘅右箭嘴"),
            keywords: &["向上彎嘅右箭嘴"],
        },
        #[cfg(feature = "zu")]
        crate::Annotation {
            lang: "zu",
            tts: Some("umcibisholo wakwesokudla ogobele phezulu"),
            keywords: &["umcibisholo", "umcibisholo wakwesokudla ogobele phezulu"],
        },
    ],
};
#[doc = "⤵\u{fe0f}"]
pub const RIGHT_ARROW_CURVING_DOWN: crate::Emoji = crate::Emoji {
glyph: "⤵\u{fe0f}",
codepoint: "2935 FE0F",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "right arrow curving down",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[crate::Emoji {
glyph: "⤵",
codepoint: "2935",
status: crate::Status::Unqualified,
introduction_version: 0.6f32,
name: "right arrow curving down",
group: "Symbols",
subgroup: "arrow",
is_variant: true,
variants: &[],
annotations: &[],
}],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("pyl na regs buig af"),
keywords: &["af", "krulpyl na regs onder", "pyl", "pyl na regs buig af"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("ወደ ታች ታጣፊ ቀኝ ጠቋሚ ቀስት"),
keywords: &["ቀስት", "ታች", "ወደ ታች ታጣፊ ቀኝ ጠቋሚ ቀስት"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهم لأسفل من اليسار"),
keywords: &["سهم", "سهم لأسفل من اليسار", "لأسفل"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some(
"তললৈ ভ\u{9be}\u{981}জ লোৱ\u{9be} সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
),
keywords: &[
"ক\u{9be}\u{981}ড\u{9bc}",
"তললৈ ভ\u{9be}\u{981}জ লোৱ\u{9be} সো\u{981}ম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("aşağı dönən sağ ox"),
keywords: &["aşağı", "aşağı dönən sağ ox", "ox"],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка ўправа з паваротам уніз"),
keywords: &[
"кірунак",
"стрэлка",
"стрэлка ўправа з паваротам уніз",
"уніз",
],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("извита стрелка надясно и надолу"),
keywords: &["извита стрелка надясно и надолу", "надолу", "стрелка"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("ড\u{9be}ন তীর নীচের দিকে ব\u{9be}\u{981}ক\u{9be}নো"),
keywords: &[
"ড\u{9be}ন তীর নীচের দিকে ব\u{9be}\u{981}ক\u{9be}নো",
"তীর",
"নিম\u{9cd}নম\u{9c1}খী",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica desno zakrivljena prema dolje"),
keywords: &["strelica", "strelica desno zakrivljena prema dolje"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa dreta que gira cap avall"),
keywords: &[
"avall",
"dreta",
"fletxa",
"fletxa dreta que gira cap avall",
"gir",
],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎠᎦᏘᏏ ᎦᏝᏗ ᎠᏑᏲ ᎡᎳᏗᎠᎦᏘ"),
keywords: &["ᎠᎦᏘᏏ ᎦᏝᏗ ᎠᏑᏲ ᎡᎳᏗᎠᎦᏘ", "ᎡᎳᏗᎠᎦᏘ", "ᎦᏝᏗ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipka doprava stáčející se dolů"),
keywords: &["dolů", "šipka", "šipka doprava stáčející se dolů"],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth i’r dde yn troi i lawr"),
keywords: &["i lawr", "saeth", "saeth i’r dde yn troi i lawr"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("pil mod højre med sving nedad"),
keywords: &["ned", "pil", "pil mod højre med sving nedad"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("geschwungener Pfeil nach unten"),
keywords: &[
"Pfeil",
"geschwungen",
"geschwungener Pfeil nach unten",
"nach unten",
"unten",
],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("δεξιό βέλος που στρίβει κάτω"),
keywords: &["βέλος", "δεξιό βέλος που στρίβει κάτω", "κάτω"],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("right arrow curving down"),
keywords: &["arrow", "down", "right arrow curving down"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha derecha curvándose hacia abajo"),
keywords: &[
"abajo",
"curva",
"dirección",
"flecha",
"flecha derecha curvándose hacia abajo",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("flecha de retorno al sur por la derecha"),
keywords: &["flecha de retorno al sur por la derecha"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &["abajo", "flecha", "flecha derecha curvándose hacia abajo"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("kaardus nool alla"),
keywords: &["alla", "kaardus nool alla", "nool"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("behera okertzen den eskuinera gezia"),
keywords: &["behera", "behera okertzen den eskuinera gezia", "gezi"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان خمیده پایین راست"),
keywords: &["پایین", "پیکان", "پیکان خمیده پایین راست"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("alas kääntyvä nuoli"),
keywords: &["alas", "alas kääntyvä nuoli", "nuoli"],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("pakanang arrow na kumurba pababa"),
keywords: &[
"arrow",
"direksyon",
"kurba",
"pababa",
"pakanan",
"pakanang arrow na kumurba pababa",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("bogin pílur sum peikar niðureftir"),
keywords: &["bogin", "bogin pílur sum peikar niðureftir", "pílur"],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche courbe bas"),
keywords: &["flèche", "flèche courbe bas"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("flèche courbe vers le bas"),
keywords: &["courbe vers le bas", "flèche", "flèche courbe vers le bas"],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead dheas ag dul síos"),
keywords: &["saighead", "saighead dheas ag dul síos", "síos"],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead dheas a’ lùbadh sìos"),
keywords: &["saighead", "saighead dheas a’ lùbadh sìos", "sìos"],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha curvada cara abaixo"),
keywords: &["abaixo", "curva", "frecha", "frecha curvada cara abaixo"],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("નીચ\u{ac7} વળત\u{ac1}\u{a82} જમણ\u{ac1}\u{a82} તીર"),
keywords: &["તીર", "નીચ\u{ac7} વળત\u{ac1}\u{a82} જમણ\u{ac1}\u{a82} તીર"],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("kibiyar dama mai lanƙwasa ta ƙasa"),
keywords: &["kibiya", "kibiyar dama mai lanƙwasa ta ƙasa", "ƙasa"],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ ימינה מתעקל מטה"),
keywords: &["חץ", "חץ ימינה מתעקל מטה", "למטה", "מתעקל"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("नीच\u{947} की ओर म\u{941}ड\u{93c}ा दाया\u{901} तीर"),
keywords: &["तीर", "नीच\u{947} की ओर म\u{941}ड\u{93c}ा दाया\u{901} तीर"],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("desna strelica koja se zakrivljuje prema dolje"),
keywords: &["desna strelica koja se zakrivljuje prema dolje", "strelica"],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("lefelé görbülő jobb nyíl"),
keywords: &["lefelé", "lefelé görbülő jobb nyíl", "nyíl"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("ձախից ներքև թեքվող սլաք"),
keywords: &["ձախից ներքև թեքվող սլաք", "ներքև", "սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah kanan melengkung ke bawah"),
keywords: &["bawah", "panah", "tanda panah kanan melengkung ke bawah"],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("ube-akanri na-eme mgbagọ dị ala"),
keywords: &["ala", "ube", "ube-akanri na-eme mgbagọ dị ala"],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("sveigð ör niður"),
keywords: &["niður", "sveigð ör niður", "ör"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia curva in basso"),
keywords: &[
"curva",
"freccia",
"freccia curva in basso",
"verso il basso",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("下カーブ矢印"),
keywords: &["カーブ", "下", "下カーブ矢印", "曲線", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah nengen menggok mudhun"),
keywords: &["panah", "panah nengen menggok mudhun"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი, რომელიც იმრუდება მარჯვნიდან ქვემოთ"),
keywords: &[
"ისარი",
"ისარი, რომელიც იმრუდება მარჯვნიდან ქვემოთ",
"ქვემოთ",
],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("aneccab ayeffus yettin d akesser"),
keywords: &["aneccab ayeffus yettin d akesser"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("оң жақтан шығып, төменге бұрылатын көрсеткі"),
keywords: &[
"көрсеткі",
"оң жақтан шығып, төменге бұрылатын көрсеткі",
"төмен",
],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("pil mod højre med sving nedad"),
keywords: &["ned", "pil", "pil mod højre med sving nedad"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some("ព\u{17d2}រ\u{17bd}ញស\u{17d2}ដា\u{17c6}កោងច\u{17bb}ះក\u{17d2}រោម"),
keywords: &[
"ច\u{17bb}ះក\u{17d2}រោម",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញស\u{17d2}ដា\u{17c6}កោងច\u{17bb}ះក\u{17d2}រោម",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಕ\u{cc6}ಳಕ\u{ccd}ಕ\u{cc6} ತ\u{cbf}ರುಗುವ ಬಲ ಬಾಣ"),
keywords: &["ಕ\u{cc6}ಳಕ\u{ccd}ಕ\u{cc6} ತ\u{cbf}ರುಗುವ ಬಲ ಬಾಣ", "ಬಾಣ"],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("아래쪽으로 꺾어지는 우향 화살표"),
keywords: &["아래쪽으로 꺾어지는 우향 화살표", "우회전", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("उजवो बाण सकयल वळा"),
keywords: &["उजवो बाण सकयल वळा", "बाण", "सकयल"],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("оңдон төмөн ийилген жебе"),
keywords: &["жебе", "оңдон төмөн ийилген жебе"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("Rietsfeil mat Kéier no ënnen"),
keywords: &["Feil", "Rietsfeil mat Kéier no ënnen", "no ënnen"],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນໂຄ\u{ec9}ງຂວາລ\u{ebb}ງລ\u{eb8}\u{ec8}ມ"),
keywords: &[
"ຂວາ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນໂຄ\u{ec9}ງຂວາລ\u{ebb}ງລ\u{eb8}\u{ec8}ມ",
"ລ\u{ebb}ງລ\u{eb8}\u{ec8}ມ",
"ໂຄ\u{ec9}ງ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė į dešinę, užlenkta į apačią"),
keywords: &["rodyklė", "rodyklė į dešinę, užlenkta į apačią", "į apačią"],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("lejup vērsta izliekta bultiņa"),
keywords: &["bultiņa", "lejup vērsta izliekta bultiņa"],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere matau piko iho"),
keywords: &["iho", "pere", "pere matau piko iho"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("десна стрелка што врти надолу"),
keywords: &["десна стрелка што врти надолу", "десно", "долу", "стрелка"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some("ത\u{d3e}ഴേക\u{d4d}ക\u{d4d} വളഞ\u{d4d}ഞ വലത\u{d4d} അമ\u{d4d}പടയ\u{d3e}ളം"),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ത\u{d3e}ഴേക\u{d4d}ക\u{d4d}",
"ത\u{d3e}ഴേക\u{d4d}ക\u{d4d} വളഞ\u{d4d}ഞ വലത\u{d4d} അമ\u{d4d}പടയ\u{d3e}ളം",
"വളവ\u{d4d}",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("доошоо заасан баруун сум"),
keywords: &["баруун", "доош", "доошоо заасан баруун сум", "сум"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("उजवा खाली वळल\u{947}ला बाण"),
keywords: &["उजवा खाली वळल\u{947}ला बाण", "खाली", "बाण"],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah ke kanan melengkung ke bawah"),
keywords: &[
"anak panah",
"anak panah ke kanan melengkung ke bawah",
"ke bawah",
],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa leminija mgħawġa ’l isfel"),
keywords: &["vleġġa", "vleġġa leminija mgħawġa ’l isfel", "’l isfel"],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ညာည\u{103d}\u{103e}န\u{103a} အောက\u{103a}ဝ\u{102d}\u{102f}က\u{103a} မြား"),
keywords: &[
"ညာည\u{103d}\u{103e}န\u{103a} အောက\u{103a}ဝ\u{102d}\u{102f}က\u{103a} မြား",
"မြား",
"အောက\u{103a}ည\u{103d}\u{103e}န\u{103a}မြား",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("pil som bøyer ned"),
keywords: &["ned", "pil", "pil som bøyer ned"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("तल मोड\u{947}को दाया\u{901} वाण"),
keywords: &["तल", "तल मोड\u{947}को दाया\u{901} वाण", "वाण"],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("pijl naar rechts die omlaag draait"),
keywords: &["omlaag", "pijl", "pijl naar rechts die omlaag draait"],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("pil som bøyer ned"),
keywords: &["ned", "pil", "pil som bøyer ned"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର ତଳକ\u{b41} ମୋଡ\u{b3c}\u{b3f} ହେଉଛ\u{b3f}"),
keywords: &[
"ଡ\u{b3e}ହ\u{b3e}ଣ ତୀର ତଳକ\u{b41} ମୋଡ\u{b3c}\u{b3f} ହେଉଛ\u{b3f}",
"ତଳ",
"ତୀର",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਥ\u{a71}ਲ\u{a47} ਮ\u{a41}ੜਨ ਵਾਲਾ ਖ\u{a71}ਬਾ ਤੀਰ"),
keywords: &["ਤੀਰ", "ਥ\u{a71}ਲ\u{a47} ਮ\u{a41}ੜਨ ਵਾਲਾ ਖ\u{a71}ਬਾ ਤੀਰ"],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("سجا تیر ہیٹھاں مڑدیاں ہوئیاں"),
keywords: &["تیر", "سجا تیر ہیٹھاں مڑدیاں ہوئیاں", "ہیٹھاں"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Raít Áro Kọ\u{301}v Gó Daun"),
keywords: &["Daun", "Raít Áro Kọ\u{301}v Gó Daun", "Áro"],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka w prawo skręcająca w dół"),
keywords: &[
"strzałka",
"strzałka w prawo skręcająca dół",
"strzałka w prawo skręcająca w dół",
"zakręt",
],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("ښۍ غشی ښکته کوږ"),
keywords: &["غشی", "ښکته", "ښۍ غشی ښکته کوږ"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta para a direita curvada para baixo"),
keywords: &[
"curva",
"seta curva de cima para baixo",
"seta para a direita curvada para baixo",
"seta para baixo",
],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("seta para a direita com curva para baixo"),
keywords: &[
"curva",
"para baixo",
"seta",
"seta para a direita com curva para baixo",
],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("paña wach’i llink’uwan uranman"),
keywords: &["paña wach’i llink’uwan uranman"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată dreapta curbată în jos"),
keywords: &["jos", "săgeată", "săgeată dreapta curbată în jos"],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-155"),
keywords: &["E10-155"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка вправо с поворотом вниз"),
keywords: &[
"изгиб",
"изогнутая стрелка",
"поворот вниз",
"стрелка вправо",
"стрелка вправо с поворотом вниз",
],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("umwambi w’iburyo uhetamiye hasi"),
keywords: &["hasi", "umwambi", "umwambi w’iburyo uhetamiye hasi"],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("هيٺ مڙندي ساڄي طرف تير"),
keywords: &["تير", "هيٺ", "هيٺ مڙندي ساڄي طرف تير"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("පහළට රව\u{dd4}ම\u{dca} ව\u{dd6} දක\u{dd4}ණ\u{dd4} ඊතලය"),
keywords: &[
"ඊතලය",
"පහළ",
"පහළට රව\u{dd4}ම\u{dca} ව\u{dd6} දක\u{dd4}ණ\u{dd4} ඊතලය",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka doprava zakrivená nadol"),
keywords: &["nadol", "šípka", "šípka doprava zakrivená nadol"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica desno, ki se ukrivlja dol"),
keywords: &["navzdol", "puščica", "puščica desno, ki se ukrivlja dol"],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("fallaarta midig u qalloocsan hoos"),
keywords: &["fallaar", "fallaarta midig u qalloocsan hoos"],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjetë djathtas e përkulur poshtë"),
keywords: &["poshtë", "shigjetë", "shigjetë djathtas e përkulur poshtë"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица која скреће надоле"),
keywords: &["стрeлицa", "стрелица која скреће надоле"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("стрелица која скреће надоље"),
keywords: &["стрелица која скреће надоље"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica koja skreće nadole"),
keywords: &["strelica", "strelica koja skreće nadole"],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: Some("strelica koja skreće nadolje"),
keywords: &["strelica koja skreće nadolje"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("svängd nedåtpil"),
keywords: &["nedåt", "pil", "sväng", "svängd nedåtpil"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale wa kulia unaopinda chini"),
keywords: &["chini", "mshale", "mshale wa kulia unaopinda chini"],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some(
"க\u{bc0}ழ\u{bcd}நோக\u{bcd}கி வளைந\u{bcd}திருக\u{bcd}கும\u{bcd} வலது அம\u{bcd}புக\u{bcd}குறி",
),
keywords: &[
"அம\u{bcd}புக\u{bcd}குறி",
"க\u{bc0}ழ\u{bcd}நோக\u{bcd}கி வளைந\u{bcd}த வலது அம\u{bcd}புக\u{bcd}குறி",
"க\u{bc0}ழ\u{bcd}நோக\u{bcd}கி வளைந\u{bcd}திருக\u{bcd}கும\u{bcd} வலது அம\u{bcd}புக\u{bcd}குறி",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some(
"క\u{c4d}ర\u{c3f}ంద\u{c3f}క\u{c3f} వంపు త\u{c3f}ర\u{c3f}గ\u{c3f}న కుడ\u{c3f} బ\u{c3e}ణం",
),
keywords: &[
"క\u{c4d}ర\u{c3f}ంద\u{c3f}క\u{c3f}",
"క\u{c4d}ర\u{c3f}ంద\u{c3f}క\u{c3f} వంపు త\u{c3f}ర\u{c3f}గ\u{c3f}న కుడ\u{c3f} బ\u{c3e}ణం",
"బ\u{c3e}ణం",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири рости ба тарафи поён каҷшаванда"),
keywords: &["поён", "тир", "тири рости ба тарафи поён каҷшаванда"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ล\u{e39}กศรโค\u{e49}งลง"),
keywords: &["ลง", "ล\u{e39}กศร", "ล\u{e39}กศรโค\u{e49}งลง"],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("የማናይ ምልክት ናብ ታሕቲ ዝተጠውየ"),
keywords: &["ምልክት", "ታሕቲ", "የማናይ ምልክት ናብ ታሕቲ ዝተጠውየ"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("saga ok, aşak egrelýän"),
keywords: &["aşak", "ok", "saga ok, aşak egrelýän"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau afe ki lalo"),
keywords: &["afe", "hifo", "lalo", "ngahau", "ngahau afe ki lalo"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("aşağı kıvrımlı sağ ok"),
keywords: &["aşağı", "aşağı kıvrımlı sağ ok", "ok"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("ئاستىغا ئېگىلگەن ئوڭ كۆرسەتكۈچ"),
keywords: &["ئاستى", "ئاستىغا ئېگىلگەن ئوڭ كۆرسەتكۈچ", "كۆرسەتكۈچ"],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка вправо з вигином униз"),
keywords: &[
"вниз",
"стрілка",
"стрілка вправо з вигином униз",
"стрілка праворуч із поворотом униз",
],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("نیچے مڑتا ہوا دائیں تیر"),
keywords: &["تیر", "سائن", "سمت", "نیچے مڑتا ہوا دائیں تیر"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("o‘ngdan pastga qayrilish"),
keywords: &["o‘ngdan pastga qayrilish", "pastga", "strelka"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên phải cong xuống"),
keywords: &["mũi tên", "mũi tên phải cong xuống", "xuống"],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fettu ndijoor lemoo suuf"),
keywords: &["fett", "fettu ndijoor lemoo suuf", "suuf"],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo olusekunene olugobe ezantsi"),
keywords: &["ezantsi", "utolo", "utolo olusekunene olugobe ezantsi"],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some(
"àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún onígun ìsàlẹ\u{300}",
),
keywords: &[
"ilẹ\u{300}",
"o\u{329}fà",
"àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà apá ọ\u{300}tún onígun ìsàlẹ\u{300}",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("向右箭咀彎向下"),
keywords: &["向下", "向右箭咀彎向下", "箭咀"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("向右箭咀弯向下"),
keywords: &["向下", "向右箭咀弯向下", "箭咀"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("右下弯箭头"),
keywords: &["右下弯", "右下弯箭头", "向下弯曲的右箭头", "箭头"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("右下旋轉箭頭"),
keywords: &["右下旋轉箭頭"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("向下彎嘅右箭嘴"),
keywords: &["向下彎嘅右箭嘴"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("umcibisholo wakwesokudla ogobele phansi"),
keywords: &[
"phansi",
"umcibisholo",
"umcibisholo wakwesokudla ogobele phansi",
],
},
],
};
#[doc = "🔃"]
pub const CLOCKWISE_VERTICAL_ARROWS: crate::Emoji = crate::Emoji {
glyph: "🔃",
codepoint: "1F503",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "clockwise vertical arrows",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("kloksgewyse vertikale pyle"),
keywords: &["herlaai", "kloksgewys", "kloksgewyse vertikale pyle", "pyl"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("በሰዓት አዟዟር አቅጣጫ ያሉ ቀጥ ያሉ ቀስቶች"),
keywords: &[
"ቀስት",
"በሰዓት አቆጣጠር አቅጣጫ",
"በሰዓት አዟዟር አቅጣጫ ያሉ ቀጥ ያሉ ቀስቶች",
"ዳግም ጫን",
],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهمان رأسيان مع عقارب الساعة"),
keywords: &[
"أسهم رأسية مع عقارب الساعة",
"أسهم عمودية باتجاه عقارب الساعة",
"سهمان رأسيان مع عقارب الساعة",
"علامة إعادة تحميل",
],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some(
"ঘড\u{9bc}ী ক\u{9be}\u{981}ট\u{9be}ৰ দিশৰ উলম\u{9cd}ব ক\u{9be}\u{981}ড\u{9bc}",
),
keywords: &[
"ক\u{9be}\u{981}ড\u{9bc}",
"ঘড\u{9bc}ী ক\u{9be}\u{981}ট\u{9be}ৰ দিশৰ উলম\u{9cd}ব ক\u{9be}\u{981}ড\u{9bc}",
"ঘড\u{9bc}ীৰ ক\u{9be}\u{981}ট\u{9be}ৰ দিশৰ",
"প\u{9c1}নঃলোড",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("saat istiqamətində yönəlmiş şaquli ox"),
keywords: &[
"ox",
"saat istiqaməti",
"saat istiqamətində yönəlmiş şaquli ox",
"yenidən yüklə",
],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("вертыкальныя стрэлкі па гадзіннікавай стрэлцы"),
keywords: &[
"вертыкальныя стрэлкі па гадзіннікавай стрэлцы",
"па гадзіннікавай стрэлцы",
"перазагрузіць",
"стрэлка",
],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("вертикални стрелки по часовниковата стрелка"),
keywords: &[
"вертикални стрелки по часовниковата стрелка",
"по часовниковата стрелка",
"презареждане",
"стрелка",
],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("ঘড\u{9bc}ির ক\u{9be}\u{981}ট\u{9be}র উল\u{9cd}লম\u{9cd}ব তীর"),
keywords: &[
"ঘড\u{9bc}ির ক\u{9be}\u{981}ট\u{9be}র উল\u{9cd}লম\u{9cd}ব তীর",
"ঘড\u{9bc}ির ক\u{9be}\u{981}ট\u{9be}র দিকে",
"তীর",
"প\u{9c1}নর\u{9be}য\u{9bc} লোড",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelice vertikalno kretanje kazaljki na satu"),
keywords: &[
"smjer kretanja kazaljki na satu",
"strelica",
"strelice vertikalno kretanje kazaljki na satu",
],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxes que giren cap a la dreta"),
keywords: &[
"dreta",
"fletxes",
"fletxes que giren cap a la dreta",
"fletxes verticals en sentit horari",
"gir",
],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎪᏪᎵ ᏗᎦᏂᏱᏙᏗ"),
keywords: &["ᎦᏝᏗ", "ᎪᏪᎵ ᏗᎦᏂᏱᏙᏗ", "ᏙᎪᏢᎯᏐᏗ", "ᏩᏥ ᎤᏪᏅᏍᏗ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("svislé šipky ve směru hodinových ručiček"),
keywords: &[
"doprava",
"hodin",
"načtení",
"načíst",
"nové",
"opětovné",
"ručiček",
"směr",
"směru",
"svislé šipky ve směru hodinových ručiček",
"znovu",
"šipka",
],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saethau clocwedd fertigol"),
keywords: &[
"ail-lwytho",
"clocwedd",
"saeth",
"saethau clocwedd fertigol",
],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("lodrette cirkulære pile med uret"),
keywords: &[
"cirkulære pile",
"genindlæs",
"lodrette cirkulære pile med uret",
"lodrette pile",
"med uret",
"vertikale pile",
],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("kreisförmige Pfeile im Uhrzeigersinn"),
keywords: &[
"Pfeile",
"im Uhrzeigersinn",
"kreisförmige Pfeile im Uhrzeigersinn",
],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("δεξιόστροφα κάθετα βέλη"),
keywords: &[
"βέλος",
"δεξιόστροφα κάθετα βέλη",
"δεξιόστροφο",
"επαναφόρτωση",
],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("clockwise vertical arrows"),
keywords: &["arrow", "clockwise", "clockwise vertical arrows", "reload"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flechas verticales en sentido horario"),
keywords: &[
"flechas",
"flechas verticales en sentido horario",
"flechas verticales sentido horario",
"horario",
"señal de recarga",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &[
"flecha",
"flechas verticales en sentido horario",
"recarga",
"sentido horario",
],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("vertikaalsed nooled päripäeva"),
keywords: &[
"laadimine",
"nool",
"päripäeva",
"vertikaalsed nooled päripäeva",
],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("eskuinera gezi bertikalak"),
keywords: &[
"berriro",
"eskuinera",
"eskuinera gezi bertikalak",
"gezi",
"kargatu",
],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("جهت عقربه\u{200c}های ساعت"),
keywords: &[
"تازه\u{200c}سازی",
"جهت حرکت عقربه\u{200c}های ساعت",
"جهت عقربه\u{200c}های ساعت",
"پیکان",
],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("pystynuolet myötäpäivään"),
keywords: &[
"myötäpäivään",
"nuoli",
"pystynuolet myötäpäivään",
"päivitä",
],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("mga clockwise na patayong arrow"),
keywords: &[
"arrow",
"clockwise",
"mga clockwise na patayong arrow",
"patayo",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("loddrættir pílar sum ganga við urinum"),
keywords: &[
"loddrættir pílar sum ganga við urinum",
"lodrættir",
"pílar",
"við urinum",
],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèches dans le sens horaire"),
keywords: &[
"flèche",
"flèches dans le sens horaire",
"sens horaire",
"symbole Recharger",
],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("flèches verticales dans le sens horaire"),
keywords: &[
"flèche",
"flèches verticales dans le sens horaire",
"horaire",
"recharger",
"sens des aiguilles d’une montre",
"vertical",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saigheada ingéaracha ar deiseal"),
keywords: &[
"athlódáil",
"ciorcal saighead",
"saigheada",
"saigheada deisil ingearacha",
"saigheada ingéaracha ar deiseal",
],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighdean inghearach a’ dol deiseil"),
keywords: &[
"ath-luchdadh",
"ath-luchdaich",
"deiseil",
"saighdean inghearach a’ dol deiseil",
"saighead",
],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frechas verticais xirando cara á dereita"),
keywords: &[
"frechas",
"frechas verticais xirando cara á dereita",
"sentido das agullas do reloxo",
"xirar",
],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("ઊભા દક\u{acd}ષિણાવર\u{acd}તી તીર"),
keywords: &[
"ઊભા દક\u{acd}ષિણાવર\u{acd}તી તીર",
"તીર",
"ફરીથી લોડ કરો",
"ફરીથી લોડ કરોન\u{ac1}\u{a82} ચિહ\u{acd}ન",
],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("kibiyoyi a tsaye ta hannun dama"),
keywords: &[
"kibiya",
"kibiyoyi a tsaye ta hannun dama",
"sake loda",
"ta hannun dama",
],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חצים אנכיים מתעקלים בכיוון השעון"),
keywords: &[
"חצים",
"חצים אנכיים מתעקלים בכיוון השעון",
"טעינה מחדש",
"מעגל",
],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("घड\u{93c}ी की दिशा म\u{947}\u{902} ल\u{902}बवत तीर"),
keywords: &[
"घड\u{93c}ी की दिशा म\u{947}\u{902} ल\u{902}बवत तीर",
"तीर",
"दक\u{94d}षिणावर\u{94d}त",
"रिलोड",
],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("okomite strelice u smjeru kazaljki na satu"),
keywords: &[
"okomite strelice u smjeru kazaljki na satu",
"ponovno učitavanje",
"smjer kazaljki na satu",
"strelica",
],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("óramutató járásával megegyező irányú függőleges nyilak"),
keywords: &[
"nyíl",
"óramutató járásával megegyező",
"óramutató járásával megegyező irányú függőleges nyilak",
"újratöltés",
],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("ժամասլաքի ուղղությամբ ուղղահայաց սլաքներ"),
keywords: &[
"ժամասլաքի ուղղությամբ",
"ժամասլաքի ուղղությամբ ուղղահայաց սլաքներ",
"սլաք",
"վերաբեռնել",
],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah searah jarum jam"),
keywords: &[
"muat ulang",
"panah",
"searah jarum jam",
"tanda panah searah jarum jam",
],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("njem okirikiri àkụ kwụọtọ"),
keywords: &[
"dọnyegharịa",
"njem okirikiri",
"njem okirikiri àkụ kwụọtọ",
"ube",
],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("endurhlaða"),
keywords: &[
"endurhlaða",
"endurhleðslutákn",
"lóðréttar réttsælisörvar",
"örvar",
],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("frecce verticali che ruotano in senso orario"),
keywords: &[
"frecce",
"frecce a cerchio verticali",
"frecce verticali che ruotano in senso orario",
"frecce verticali senso orario",
"ricarica",
"simbolo ricarica",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("右回り縦矢印"),
keywords: &["リロード", "右回り", "右回り縦矢印", "時計回り", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah ngadeg arahe padha ubenge jam"),
keywords: &[
"arah ubenge jam",
"ngisi maneh",
"panah",
"panah ngadeg arahe padha ubenge jam",
],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ვერტიკალური ისრები საათის მიმართულებით"),
keywords: &[
"გადატვირთვა",
"ვერტიკალური ისრები საათის მიმართულებით",
"ისარი",
"საათის ისრის მიმართულებით",
],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("ineccaben tanila n isegnan n temrilt"),
keywords: &["ineccaben tanila n isegnan n temrilt"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("сағат тілі бойынша тік көрсеткілер"),
keywords: &[
"көрсеткі",
"сағат тілі бойынша",
"сағат тілі бойынша тік көрсеткілер",
"қайта жүктеу",
],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("genindlæse"),
keywords: &["genindlæse", "genindlæse-tegn", "vertikale pile med uret"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some(
"ព\u{17d2}រ\u{17bd}ញបញ\u{17d2}ឈវវ\u{17b7}លតាមទ\u{17b7}ស\u{200b}ទ\u{17d2}រន\u{17b7}ចនាឡ\u{17b7}កា",
),
keywords: &[
"ទ\u{17b7}ស",
"ទ\u{17b7}សដៅ",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញបញ\u{17d2}ឈវវ\u{17b7}លតាមទ\u{17b7}ស\u{200b}ទ\u{17d2}រន\u{17b7}ចនាឡ\u{17b7}កា",
"ព\u{17d2}រ\u{17bd}ញវ\u{17b7}លបញ\u{17d2}ឈរតាមទ\u{17d2}រន\u{17b7}ចនាឡ\u{17b7}កាក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}ងជ\u{17d2}រ\u{17bb}ង",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಪ\u{ccd}ರದಕ\u{ccd}ಷ\u{cbf}ಣವಾಗ\u{cbf} ವರ\u{ccd}ಟ\u{cbf}ಕಲ\u{ccd} ಬಾಣಗಳು"),
keywords: &[
"ಪ\u{ccd}ರದಕ\u{ccd}ಷ\u{cbf}ಣವಾಗ\u{cbf} ವರ\u{ccd}ಟ\u{cbf}ಕಲ\u{ccd} ಬಾಣಗಳು",
"ಬಾಣಗಳು",
"ರ\u{cbf}ಲೋಡ\u{ccd} ಚ\u{cbf}ಹ\u{ccd}ನ\u{cc6}",
"ವರ\u{ccd}ಟ\u{cbf}ಕಲ\u{ccd} ಕ\u{ccd}ಲಾಕ\u{ccd}\u{200c}ವೈಸ\u{ccd} ಬಾಣಗಳು",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("시계 방향 화살표"),
keywords: &["새로고침", "새로고침 표시", "시계 방향 화살표", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("सव\u{94d}य उब\u{947} बाण"),
keywords: &[
"उजव\u{94d}या दिश\u{947}न वचपी बाण",
"बाण",
"रिलोड",
"सव\u{94d}य",
"सव\u{94d}य उब\u{947} बाण",
],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("саат жебеси менен тик турган жебелер"),
keywords: &[
"жебе",
"саат жебеси",
"саат жебеси менен тик турган жебелер",
],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("vertikal Feiler an Zärerichtung"),
keywords: &[
"Feil",
"an Zärerichtung",
"opzéien",
"vertikal Feiler an Zärerichtung",
],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນຕາມທ\u{eb4}ດເຂ\u{eb1}ມໂມງ"),
keywords: &[
"ທ\u{eb4}ດເຂ\u{eb1}ມໂມງ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນຕາມທ\u{eb4}ດເຂ\u{eb1}ມໂມງ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("vertikalios laikrodžio rodyklės kryptimis nukreiptos rodyklės"),
keywords: &[
"pagal laikrodžio rodyklę",
"rodyklė",
"vertikalios laikrodžio rodyklės kryptimis nukreiptos rodyklės",
],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("pulksteņrādītāja virzienā vērstas vertikālas bultiņas"),
keywords: &[
"bultiņa",
"pulksteņrādītāja virzienā",
"pulksteņrādītāja virzienā vērstas vertikālas bultiņas",
],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere poutū whakatekaraka"),
keywords: &[
"pere",
"pere poutū whakatekaraka",
"uta anō",
"whakatekaraka",
],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("вертикални стрелки што вртат надесно"),
keywords: &[
"вертикални стрелки што вртат надесно",
"десно",
"стрелки",
"часовник",
],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some("ഘടിക\u{d3e}ര ദിശയില\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളങ\u{d4d}ങൾ"),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ഘടിക\u{d3e}ര ദിശയില\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളങ\u{d4d}ങൾ",
"ഘടിക\u{d3e}ര ദിശയില\u{d41}ള\u{d4d}ള രണ\u{d4d}ട\u{d4d} ആരോകൾ",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("цагийн зүүний босоо сум"),
keywords: &["босоо", "сум", "цагийн зүү", "цагийн зүүний босоо сум"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some(
"घड\u{94d}\u{200d}याळाच\u{94d}या दिश\u{947}त असल\u{947}ल\u{947} अन\u{941}ल\u{902}ब बाण",
),
keywords: &[
"घड\u{94d}याळाच\u{94d}या दिश\u{947}त",
"घड\u{94d}\u{200d}याळाच\u{94d}या दिश\u{947}त असल\u{947}ल\u{947} अन\u{941}ल\u{902}ब बाण",
"बाण",
"रीलोड करा",
],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah menegak ikut jam"),
keywords: &[
"anak panah",
"anak panah menegak ikut jam",
"ikut arah jam",
"muat semula",
],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġeġ vertikali favur l-arloġġ"),
keywords: &[
"favur l-arloġġ",
"terġa’ ttella’",
"vleġeġ vertikali favur l-arloġġ",
"vleġġa",
],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ညာရစ\u{103a} ဒေါင\u{103a}လ\u{102d}\u{102f}က\u{103a}ပြ မြားများ"),
keywords: &[
"ညာရစ\u{103a} ဒေါင\u{103a}လ\u{102d}\u{102f}က\u{103a}ပြ မြားများ",
"နာရ\u{102e}လက\u{103a}တ\u{1036}လားရာ",
"နာရ\u{102e}လက\u{103a}တ\u{1036}လားရာအတ\u{102d}\u{102f}င\u{103a}း ညာရစ\u{103a}ပြ ဒေါင\u{103a}လ\u{102d}\u{102f}က\u{103a} မြားများ",
"မြား",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("vertikale piler med klokken"),
keywords: &["last inn på nytt", "piler", "vertikale piler med klokken"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("घडीको दिशातिर ठाडो वाणहर\u{942}"),
keywords: &[
"घडीको दिशा तिर",
"घडीको दिशातिर ठाडो वाणहर\u{942}",
"प\u{941}नः लोड",
"वाण",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("verticale pijlen met de klok mee"),
keywords: &[
"met de klok mee",
"opnieuw laden",
"pijl",
"verticale pijlen met de klok mee",
],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("vertikale piler med klokka"),
keywords: &["last inn på nytt", "piler", "vertikale piler med klokka"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some(
"ଘଣ\u{b4d}ଟ\u{b3e}କଣ\u{b4d}ଟ\u{b3e} ଘ\u{b42}ର\u{b3f}ବ\u{b3e} ଦ\u{b3f}ଗରେ ଭ\u{b42}ଲମ\u{b4d}ବ ତୀରଗ\u{b41}ଡ\u{b3c}\u{b3f}କ",
),
keywords: &[
"ଘଣ\u{b4d}ଟ\u{b3e}କଣ\u{b4d}ଟ\u{b3e} ଘ\u{b42}ର\u{b3f}ବ\u{b3e} ଦ\u{b3f}ଗରେ",
"ଘଣ\u{b4d}ଟ\u{b3e}କଣ\u{b4d}ଟ\u{b3e} ଘ\u{b42}ର\u{b3f}ବ\u{b3e} ଦ\u{b3f}ଗରେ ଭ\u{b42}ଲମ\u{b4d}ବ ତୀରଗ\u{b41}ଡ\u{b3c}\u{b3f}କ",
"ତୀର",
"ପ\u{b41}ନଃଲୋଡ\u{b4d}",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਘੜੀ ਦੀ ਦਿਸ\u{a3c}ਾ ਵਿ\u{a71}ਚ ਲ\u{a70}ਬਕਾਰੀ ਤੀਰ"),
keywords: &[
"ਘੜੀ ਦੀ ਦਿਸ\u{a3c}ਾ ਵਿ\u{a71}ਚ ਲ\u{a70}ਬਕਾਰੀ ਤੀਰ",
"ਤੀਰ",
"ਰੀਲ\u{a4b}ਡ",
"ਰੀਲ\u{a4b}ਡ ਨਿਸ\u{a3c}ਾਨ",
"ਲ\u{a70}ਬਕਾਰੀ ਘੜੀ ਦੀ ਦਿਸ\u{a3c}ਾ ਵਿ\u{a71}ਚ ਤੀਰ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("گھڑی وار عمودی تیر"),
keywords: &["تیر", "فیر لوڈ کرنا", "گھڑی وار", "گھڑی وار عمودی تیر"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Aro-dẹm De Múv Klọ\u{301}k Hánd Dairẹ\u{301}kshọn"),
keywords: &[
"Aro-dẹm De Múv Klọ\u{301}k Hánd Dairẹ\u{301}kshọn",
"Klọ\u{301}k Hánd Dairẹ\u{301}kshọn",
"Rilod",
"Áro",
],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("pionowe strzałki zgodne z ruchem wskazówek zegara"),
keywords: &[
"odśwież",
"okrężny",
"pionowe strzałki zgodne z ruchem wskazówek zegara",
"pionowe strzałki zgodnie z ruchem wskazówek zegara",
"strzałka",
],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("کلاک وايز عمودي غشي"),
keywords: &["بيا لوډ", "غشي", "کلاک وايز", "کلاک وايز عمودي غشي"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("setas verticais no sentido horário"),
keywords: &[
"recarregar",
"sentido horário",
"seta",
"setas verticais no sentido horário",
],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("setas verticais no sentido dos ponteiros do relógio"),
keywords: &[
"recarregar",
"sentido dos ponteiros do relógio",
"seta",
"setas verticais no sentido dos ponteiros do relógio",
],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("Sayanpa wachʼikuna phani qatichkanku"),
keywords: &["Sayanpa wachʼikuna phani qatichkanku"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeți verticale în sens orar"),
keywords: &[
"reîncărcare",
"sensul acelor de ceasornic",
"săgeți",
"săgeți verticale în sens orar",
],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-703"),
keywords: &["E10-703"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("по часовой стрелке"),
keywords: &[
"вертикальные стрелки",
"замкнутые вертикальные стрелки",
"замкнутые стрелки",
"по часовой стрелке",
"стрелки",
],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("utwambi duhagaritse tureba mu cyerekezo cy’inshinge z’isaha"),
keywords: &[
"kongera gushyiramo",
"ukurikije icyerekezo nk’inshinge z’isaha",
"umwambi",
"utwambi duhagaritse tureba mu cyerekezo cy’inshinge z’isaha",
],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("ڪلاڪ وانگي عمودي تير"),
keywords: &["تير", "ريلوڊ", "ڪلاڪ وانگي", "ڪلاڪ وانگي عمودي تير"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("දක\u{dca}ෂ\u{dd2}ණ\u{dcf}වර\u{dca}ත ස\u{dd2}රස\u{dca} ඊතල"),
keywords: &[
"ඊතලය",
"දක\u{dca}ෂ\u{dd2}ණ\u{dcf}වර\u{dca}ත",
"දක\u{dca}ෂ\u{dd2}ණ\u{dcf}වර\u{dca}ත ස\u{dd2}රස\u{dca} ඊතල",
"නැවත ප\u{dd6}රණය",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("zvislé šípky v smere hodinových ručičiek"),
keywords: &[
"v smere hodinových ručičiek",
"znovu načítať",
"zvislé šípky v smere hodinových ručičiek",
"šípka",
],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("navpični puščici, ki se ukrivljata v smeri urnega kazalca"),
keywords: &[
"navpični puščici, ki se ukrivljata v smeri urinega kazalca",
"navpični puščici, ki se ukrivljata v smeri urnega kazalca",
"puščica",
"smer urinega kazalca",
"vnovično nalaganje",
],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("fallaarada taaggan ee saacad-wareegga"),
keywords: &[
"dib -usoo kicinta",
"fallaar",
"fallaarada taaggan ee saacad-wareegga",
"saacad-wareeg",
],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjeta vertikale orare"),
keywords: &[
"orar",
"orare",
"ringarko",
"shigjeta vertikale orare",
"shigjetë",
],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелице које се врте удесно"),
keywords: &[
"пoнoвнo учитaвaњe",
"смер кaзaљке нa сaту",
"стрeлицa",
"стрелице које се врте удесно",
],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["стрелице које се врте удесно"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelice koje se vrte udesno"),
keywords: &[
"ponovno učitavanje",
"smer kazaljke na satu",
"strelica",
"strelice koje se vrte udesno",
],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: None,
keywords: &["strelice koje se vrte udesno"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("vertikala pilar medurs"),
keywords: &[
"pilar",
"pilar medurs",
"uppdateringssymbol",
"vertikala pilar medurs",
],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mishale wima inayoelekeza kwa mzunguko wa akrabu"),
keywords: &[
"mishale wima inayoelekeza kwa mzunguko wa akrabu",
"mshale",
"mzunguko wa akrabu",
"pakia upya",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("வலஞ\u{bcd}சுழி அம\u{bcd}புக\u{bcd}குறிகள\u{bcd} பொத\u{bcd}த\u{bbe}ன\u{bcd}"),
keywords: &[
"புதுப\u{bcd}பிப\u{bcd}பு",
"ம\u{bbe}ற\u{bcd}று",
"ர\u{bc0}லோட\u{bcd} சிம\u{bcd}பல\u{bcd}",
"வலஞ\u{bcd}சுழி அம\u{bcd}புக\u{bcd}குறிகள\u{bcd} பொத\u{bcd}த\u{bbe}ன\u{bcd}",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("సవ\u{c4d}యద\u{c3f}శల\u{c4b} న\u{c3f}లువు బ\u{c3e}ణ\u{c3e}లు"),
keywords: &[
"మళ\u{c4d}ల\u{c40} ల\u{c4b}డ\u{c4d} చ\u{c47}య\u{c3f}",
"ర\u{c40}ల\u{c4b}డ\u{c4d} చ\u{c3f}హ\u{c4d}నం",
"సవ\u{c4d}యద\u{c3f}శల\u{c4b} న\u{c3f}లువు బ\u{c3e}ణ\u{c3e}లు",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тирҳои амудии самти ҳаракати ақрабҳои соат"),
keywords: &[
"аз нав бор кардан",
"самти ҳаракати ақрабҳо соат",
"тир",
"тирҳои амудии самти ҳаракати ақрабҳои соат",
],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ส\u{e31}ญล\u{e31}กษณ\u{e4c}โหลดซ\u{e49}ำ"),
keywords: &[
"ซ\u{e49}ำ",
"ตามเข\u{e47}มนาฬ\u{e34}กา",
"ล\u{e39}กศร",
"ส\u{e31}ญล\u{e31}กษณ\u{e4c}โหลดซ\u{e49}ำ",
"โหลด",
],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ከመ ሰዓት ዝዘውር ትኹል ምልክት"),
keywords: &["ምልክት", "እንደገና ምጽዓን", "ከመ ሰዓት", "ከመ ሰዓት ዝዘውር ትኹል ምልክት"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("sagadyň ugruna dik oklar"),
keywords: &[
"gaýtadan ýükle",
"ok",
"sagadyň ugruna",
"sagadyň ugruna dik oklar",
],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau vilovilo toʻomataʻu"),
keywords: &["mataʻu", "ngahau", "ngahau vilovilo toʻomataʻu", "vilovilo"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("saat yönünde dönen oklar"),
keywords: &[
"oklar",
"saat yönünde dikey oklar",
"saat yönünde dönen oklar",
"saat yönüne dönen oklar",
"yeniden yükle işareti",
],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("سائەت يۆنىلىشىدىكى تىك كۆرسەتكۈچ"),
keywords: &[
"سائەت يۆنىلىشى",
"سائەت يۆنىلىشىدىكى تىك كۆرسەتكۈچ",
"قايتا يۈكلەش",
"كۆرسەتكۈچ",
],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("вертикальні стрілки за ходом годинника"),
keywords: &[
"вертикальні стрілки за ходом годинника",
"за годинниковою стрілкою",
"перезавантажити",
"стрілка",
"стрілки по колу за годинниковою стрілкою",
],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("گھڑی کی سمت میں عمودی تیر"),
keywords: &[
"تیر",
"سائن",
"سمت",
"گھڑی کی سمت",
"گھڑی کی سمت میں عمودی تیر",
],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("soat mili yo‘nalishi bo‘yicha"),
keywords: &["chiziq", "qayta yuklash", "soat mili yo‘nalishi bo‘yicha"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên thẳng đứng theo chiều kim đồng hồ"),
keywords: &[
"chiều kim đồng hồ",
"mũi tên",
"mũi tên thẳng đứng theo chiều kim đồng hồ",
"tải lại",
],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fett yu ci yoonu montar"),
keywords: &[
"ci yoonu montar",
"fett",
"fett yu ci yoonu montar",
"yabaat",
],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("amatolo athe nkqo ajikeleza ngasekunene"),
keywords: &[
"amatolo athe nkqo ajikeleza ngasekunene",
"jikeleza ngasekunene",
"layisha kwakhona",
"utolo",
],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("awọn ọfa yiyi ọna ago"),
keywords: &[
"awọn ọfa yiyi ọna ago",
"àmì ìtó\u{329}sọ\u{301}nà o\u{329}lọ\u{301}fà",
"àtúngbé",
"ọlọ\u{301}wọ\u{301}-aago",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("順時針垂直箭咀"),
keywords: &["箭咀", "重新載入", "順時針", "順時針垂直箭咀"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("顺时针垂直箭咀"),
keywords: &["箭咀", "重新载入", "顺时针", "顺时针垂直箭咀"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("顺时针垂直箭头"),
keywords: &["方向", "标识", "重新载入", "顺时针垂直箭头"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("順時針方向"),
keywords: &["順時針", "順時針方向"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("順時針方向垂直嘅箭嘴"),
keywords: &["順時針方向", "順時針方向垂直嘅箭嘴"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("imicibisholo ejikelezela ngakwesokudla"),
keywords: &[
"faka kabusha",
"imicibisholo ejikelezela ngakwesokudla",
"jikelezela ngakwesokudla",
"umcisholo",
],
},
],
};
#[doc = "🔄"]
pub const COUNTERCLOCKWISE_ARROWS_BUTTON: crate::Emoji = crate::Emoji {
glyph: "🔄",
codepoint: "1F504",
status: crate::Status::FullyQualified,
introduction_version: 1f32,
name: "counterclockwise arrows button",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("knoppie vir teenkloksgewyse pyle"),
keywords: &[
"agteruit",
"antikloksgewys",
"knoppie",
"knoppie vir teenkloksgewyse pyle",
"pyl",
"teenkloksgewys",
"teenkloksgewyse pyle",
],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("በሰዓት አዟዟር አቅጣጫ ተቃራኒ ያሉ ቀስቶች አዝራር"),
keywords: &[
"ቀስት",
"በሰዓት አቆጣጠር አቅጣጫ ተቃራኒ",
"በሰዓት አዟዟር አቅጣጫ ተቃራኒ ያሉ ቀስቶች አዝራር",
"ኋልዮሽ",
],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("زر عكس اتجاه عقارب الساعة"),
keywords: &[
"أسهم",
"زر عكس اتجاه عقارب الساعة",
"عكس اتجاه عقارب الساعة",
],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some(
"ঘড\u{9bc}ী ক\u{9be}\u{981}ট\u{9be}ৰ বিপৰীত দিশৰ উলম\u{9cd}ব ক\u{9be}\u{981}ড\u{9bc}",
),
keywords: &[
"ক\u{9be}\u{981}ড\u{9bc}",
"ঘড\u{9bc}ী ক\u{9be}\u{981}ট\u{9be}ৰ দিশৰ উলম\u{9cd}ব ক\u{9be}\u{981}ড\u{9bc} ব\u{9c1}ট\u{9be}ম",
"ঘড\u{9bc}ী ক\u{9be}\u{981}ট\u{9be}ৰ বিপৰীত দিশৰ উলম\u{9cd}ব ক\u{9be}\u{981}ড\u{9bc}",
"ঘড\u{9bc}ীৰ ক\u{9be}\u{981}ট\u{9be}ৰ বিপৰীত দিশ",
"ব\u{9be}ম\u{9be}ৱৰ\u{9cd}তী",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("saat istiqamətinin əksinə yönəlmiş oxlar düyməsi"),
keywords: &[
"ox",
"saat istiqaməti əksinə",
"saat istiqamətinin əksinə yönəlmiş oxlar düyməsi",
],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("кнопка са стрэлкамі супраць гадзіннікавай стрэлкі"),
keywords: &[
"абнавіць",
"кнопка",
"кнопка са стрэлкамі супраць гадзіннікавай стрэлкі",
"стрэлка",
"супраць гадзіннікавай стрэлкі",
],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("бутон със стрелки срещу часовниковата стрелка"),
keywords: &[
"бутон със стрелки срещу часовниковата стрелка",
"срещу часовниковата стрелка",
"стрелка",
],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("ঘড\u{9bc}ির ক\u{9be}\u{981}ট\u{9be}র বিপরীতে তীর বোত\u{9be}ম"),
keywords: &[
"ঘড\u{9bc}ির ক\u{9be}\u{981}ট\u{9be}র বিপরীত দিকে",
"ঘড\u{9bc}ির ক\u{9be}\u{981}ট\u{9be}র বিপরীত দিকে ঘোর\u{9be}",
"ঘড\u{9bc}ির ক\u{9be}\u{981}ট\u{9be}র বিপরীতে তীর বোত\u{9be}ম",
"তীর",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelice suprotno od smjera kretanja sata"),
keywords: &[
"smjer suprotan kretanju kazaljki na satu",
"strelica",
"strelice dugme suprotno kretanju kazaljki na satu",
"strelice suprotno od smjera kretanja sata",
],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("botó de fletxes que giren cap a l’esquerra"),
keywords: &[
"botó de fletxes que giren cap a l’esquerra",
"esquerra",
"fletxes en sentit antihorari",
"gir",
"sentit antihorari",
],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᏩᏥ ᎦᏏᏅᏍᏔᏅ ᎪᏪᎵ ᏗᎦᏂᏱᏙᏗ"),
keywords: &[
"ᎠᏗᏆᎸᏕᏲᎰᎢ ᎠᎦᏍᎦᏂ ᏗᏜ",
"ᎦᏝᏗ",
"ᏩᏥ ᎦᏏᏅᏍᏔᏅ",
"ᏩᏥ ᎦᏏᏅᏍᏔᏅ ᎪᏪᎵ ᏗᎦᏂᏱᏙᏗ",
"ᏩᏥ ᎦᏏᏅᏍᏔᏅᎢ",
"ᏩᏥ ᎦᏏᏅᏍᏔᏅᎢ ᎦᏝᏗ ᎦᏗ",
],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipky proti směru hodinových ručiček"),
keywords: &[
"doleva",
"hodin",
"proti",
"ručiček",
"směr",
"směru",
"šipka",
"šipky proti směru hodinových ručiček",
],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("botwm saethau gwrthglocwedd"),
keywords: &[
"botwm saethau gwrthglocwedd",
"gwrthglocwedd",
"o chwith",
"saeth",
],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("vandrette cirkulære pile mod uret"),
keywords: &[
"cirkulære pile",
"mod uret",
"vandrette cirkulære pile mod uret",
"vandrette pile",
],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("Pfeile gegen den Uhrzeigersinn"),
keywords: &[
"Pfeile",
"Pfeile gegen den Uhrzeigersinn",
"gegen den Uhrzeigersinn",
"kreisförmige Pfeile gegen den Uhrzeigersinn",
],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("κουμπί αριστερόστροφων βελών"),
keywords: &[
"αντίστροφα",
"αριστερόστροφο",
"βέλος",
"κουμπί αριστερόστροφων βελών",
],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("counterclockwise arrows button"),
keywords: &[
"anticlockwise",
"arrow",
"counterclockwise",
"counterclockwise arrows button",
"withershins",
],
},
#[cfg(feature = "en_001")]
crate::Annotation {
lang: "en_001",
tts: Some("anticlockwise arrows button"),
keywords: &["anticlockwise arrows button"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &[
"anticlockwise",
"anticlockwise arrows button",
"arrow",
"counterclockwise",
"counterclockwise arrows button",
"withershins",
],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flechas en sentido antihorario"),
keywords: &[
"dirección",
"flechas",
"flechas en sentido antihorario",
"sentido antihorario",
"señal de recarga",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("flechas horizontales en sentido horario"),
keywords: &[
"flechas",
"flechas horizontales en sentido horario",
"flechas horizontales sentido horario",
"horario",
"señal de recarga",
],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("botón de flechas contrasentido horario"),
keywords: &[
"antisentido horario",
"botón de flechas contrasentido horario",
"contrasentido horario",
"dirección contraria al sol",
"flecha",
],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("nooled vastupäeva"),
keywords: &["nool", "nooled vastupäeva", "vastupäeva"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("ezkerrera gezien botoia"),
keywords: &["ezkerrera", "ezkerrera gezien botoia", "gezi"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("خلاف جهت عقربه\u{200c}های ساعت"),
keywords: &[
"خلاف جهت عقربه\u{200c}های ساعت",
"خلاف حرکت عقربه\u{200c}های ساعت",
"پیکان",
],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("nuolet vastapäivään"),
keywords: &["nuolet vastapäivään", "nuoli", "vastapäivään"],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("mga counterclockwise na arrow"),
keywords: &[
"anticlockwise",
"arrow",
"button",
"counterclockwise",
"mga counterclockwise na arrow",
"withershins",
],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("vatnrættir pílar sum ganga ímóti urinum"),
keywords: &[
"pílar",
"vatnrættir",
"vatnrættir pílar sum ganga ímóti urinum",
"ímóti urinum",
],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèches dans le sens antihoraire"),
keywords: &[
"flèche",
"flèches dans le sens antihoraire",
"sens antihoraire",
],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("flèches verticales dans le sens antihoraire"),
keywords: &[
"antihoraire",
"flèche",
"flèches verticales dans le sens antihoraire",
"sens inverse des aiguilles d’une montre",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("cnaipe saighead tuathail"),
keywords: &[
"cnaipe saighead tuathail",
"saigheada",
"saigheada tuathail",
"tuathal",
],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("putan saighdean a’ dol tuathail"),
keywords: &["putan saighdean a’ dol tuathail", "saighead", "tuathail"],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frechas xirando cara á esquerda"),
keywords: &[
"frechas",
"frechas xirando cara á esquerda",
"reiniciar",
"sentido contrario ás agullas do reloxo",
"xirar",
],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("વામાવર\u{acd}તી તીર બટન"),
keywords: &["તીર", "વામાવર\u{acd}તી", "વામાવર\u{acd}તી તીર બટન"],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("maɓallin kibiyoyi ta hannun hagu"),
keywords: &[
"akasin hanyar rana",
"kibiya",
"maɓallin kibiyoyi ta hannun hagu",
"ta hannun hagu",
],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("לחצן חצים נגד כיוון השעון"),
keywords: &["חץ", "לחצן חצים נגד כיוון השעון", "נגד כיוון השעון"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("घड\u{93c}ी की उल\u{94d}टी दिशा म\u{947}\u{902} तीर"),
keywords: &[
"घड\u{93c}ी की उल\u{94d}टी दिशा म\u{947}\u{902} तीर",
"तीर",
"वामावर\u{94d}त",
],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("tipka sa strelicama ulijevo"),
keywords: &[
"strelica",
"suprotno od kazaljki na satu",
"tipka sa strelicama ulijevo",
],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("óramutató járásával ellentétes irányú nyilak gombja"),
keywords: &[
"nyíl",
"óramutató járásával ellentétes",
"óramutató járásával ellentétes irányú nyilak gombja",
],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("ժամասլաքին հակառակ ուղղությամբ սլաքներով կոճակ"),
keywords: &[
"ժամասլաքին հակառակ ուղղությամբ",
"ժամասլաքին հակառակ ուղղությամբ սլաքներով կոճակ",
"հակառակ ուղղությամբ",
"սլաք",
],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah berlawanan arah jarum jam"),
keywords: &[
"berlawan arah matahari",
"berlawanan jarum jam",
"kebalikan arah jarum jam",
"panah",
"tanda panah berlawanan arah jarum jam",
],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("ọdụmbi ube nke enweghị ǹjem okirikiri"),
keywords: &[
"enweghị ǹjem okirikiri",
"nduzịabụghị ya",
"ube",
"ọdụmbi ube nke enweghị ǹjem okirikiri",
"ọgụu ǹjem okirikiri",
],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("rangsælis örvar"),
keywords: &["rangsælis", "örvar"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("pulsante con frecce che ruotano in senso antiorario"),
keywords: &[
"antiorario",
"frecce",
"frecce a cerchio aperto",
"frecce senso antiorario",
"pulsante con frecce che ruotano in senso antiorario",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("左回り矢印"),
keywords: &["リピート", "反時計回り", "左回り矢印", "矢印", "繰り返し"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("tombol panah arahe walikan ubenge jam"),
keywords: &[
"panah",
"tombol panah arahe walikan ubenge jam",
"walikan arahe srengenge",
"walikan ubenge jam",
],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("საათის მიმართულების საწინააღმდეგო ისრების ღილაკი"),
keywords: &[
"ისარი",
"საათის ისრის საწინააღმდეგოდ",
"საათის მიმართულების საწინააღმდეგო ისრების ღილაკი",
],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("taqeffalt n ineccaben n tnila yettin n isegnan n temrilt"),
keywords: &["taqeffalt n ineccaben n tnila yettin n isegnan n temrilt"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("сағат тіліне қарама-қарсы көрсеткілер түймесі"),
keywords: &[
"күннің бағытына қарама-қарсы",
"көрсеткі",
"сағат тіліне қарама-қарсы",
"сағат тіліне қарама-қарсы көрсеткілер түймесі",
],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("mod uret"),
keywords: &["mod uret", "pile", "pile mod uret"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some(
"ព\u{17d2}រ\u{17bd}ញវ\u{17b7}លជារង\u{17d2}វង\u{17cb}ច\u{17d2}រាសទ\u{17d2}រន\u{17b7}ចនាឡ\u{17b7}កាក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}ងជ\u{17d2}រ\u{17bb}ង",
),
keywords: &[
"ទ\u{17b7}ស",
"ទ\u{17b7}សដៅ",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញវ\u{17b7}លជារង\u{17d2}វង\u{17cb}ច\u{17d2}រាសទ\u{17d2}រន\u{17b7}ចនាឡ\u{17b7}កាក\u{17d2}ន\u{17bb}ងរាងប\u{17bd}ងជ\u{17d2}រ\u{17bb}ង",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಅಪ\u{ccd}ರದಕ\u{ccd}ಷ\u{cbf}ಣವಾಗ\u{cbf} ಬಾಣಗಳ ಬಟನ\u{ccd}"),
keywords: &[
"ಅಪ\u{ccd}ರದಕ\u{ccd}ಷ\u{cbf}ಣವಾಗ\u{cbf}",
"ಅಪ\u{ccd}ರದಕ\u{ccd}ಷ\u{cbf}ಣವಾಗ\u{cbf} ಬಾಣಗಳ ಬಟನ\u{ccd}",
"ಬಾಣಗಳು",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("반시계 방향 화살표"),
keywords: &["반시계 방향", "반시계 방향 화살표", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("वाम बाणा\u{902}चो बटण"),
keywords: &[
"अपसव\u{94d}य",
"दाव\u{94d}या दिश\u{947}न वचपी बाण",
"बाण",
"वाम बाणा\u{902}चो बटण",
"विथरशिन\u{94d}स",
],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("саат жебесине каршы жебелер"),
keywords: &["жебе", "саат жебесине каршы", "саат жебесине каршы жебелер"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("vertikal Feiler géint Zärerichtung"),
keywords: &[
"Feil",
"a géigegesat Richtung",
"counterclockwise",
"géint Zärerichtung",
"vertikal Feiler géint Zärerichtung",
],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນທວນທ\u{eb4}ດເຂ\u{eb1}ມໂມງ"),
keywords: &[
"ທວນທ\u{eb4}ດເຂ\u{eb1}ມໂມງ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນທວນທ\u{eb4}ດເຂ\u{eb1}ມໂມງ",
"ລ\u{ebb}ງລ\u{eb8}\u{ec8}ມ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("prieš laikrodžio rodyklę nukreiptų rodyklių mygtukas"),
keywords: &[
"prieš laikrodžio rodyklę",
"prieš laikrodžio rodyklę nukreiptų rodyklių mygtukas",
"rodyklė",
],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("pretēji pulksteņrādītāja virzienam vērstas bultiņas"),
keywords: &[
"bultiņa",
"pretēji pulksteņrādītāja virzienam",
"pretēji pulksteņrādītāja virzienam vērstas bultiņas",
],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pātene pere kōaro"),
keywords: &["kōaro", "pere", "pātene pere kōaro"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("копче со стрелки што вртат налево"),
keywords: &[
"копче со стрелки што вртат налево",
"лево",
"спротивно",
"стрелки",
"часовник",
],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some(
"വിപരീത ഘടിക\u{d3e}ര ദിശയില\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളങ\u{d4d}ങള\u{d41}ടെ ബട\u{d4d}ടൺ",
),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"വിപരീത ഘടിക\u{d3e}ര ദിശയില\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളങ\u{d4d}ങള\u{d41}ടെ ബട\u{d4d}ടൺ",
"വിപരീത ഘടിക\u{d3e}ര ദിശയില\u{d41}ള\u{d4d}ള രണ\u{d4d}ട\u{d4d} ആരോകൾ",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("цагийн зүүний эсрэг товчлуур"),
keywords: &[
"товчлуур",
"цагийн зүү",
"цагийн зүүний эсрэг товчлуур",
"эсрэг",
],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some(
"घड\u{94d}\u{200d}याळाच\u{94d}या विर\u{942}द\u{94d}ध दिश\u{947}त असल\u{947}ल\u{947} बाण",
),
keywords: &[
"घड\u{94d}याळाच\u{94d}या दिश\u{947}त",
"घड\u{94d}याळाच\u{94d}या दिश\u{947}त असल\u{947}ल\u{947} बाण",
"घड\u{94d}याळाच\u{94d}या विर\u{942}द\u{94d}ध दिश\u{947}त",
"घड\u{94d}\u{200d}याळाच\u{94d}या विर\u{942}द\u{94d}ध दिश\u{947}त असल\u{947}ल\u{947} बाण",
"बाण",
"वायरशिन\u{94d}स",
],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("butang anak panah lawan jam"),
keywords: &[
"anak panah",
"arah lawan jam",
"bertentangan arah jam",
"butang anak panah lawan jam",
"lawan jam",
],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("buttuna vleġeġ kontra l-arloġġ"),
keywords: &[
"buttuna vleġeġ kontra l-arloġġ",
"kontra l-arloġġ",
"vleġġa",
"withershins",
],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ဘယ\u{103a}ရစ\u{103a} မြားများ ခလ\u{102f}တ\u{103a}"),
keywords: &[
"နာရ\u{102e}လက\u{103a}တ\u{1036}လားရာ ဆန\u{1037}\u{103a}ကျင\u{103a}ဘက\u{103a}",
"နာရ\u{102e}လက\u{103a}တ\u{1036}လားရာ ဆန\u{1037}\u{103a}ကျင\u{103a}ဘက\u{103a}ပြ မြားများ ခလ\u{102f}တ\u{103a}",
"နေလားရာန\u{103e}င\u{1037}\u{103a} ဆန\u{1037}\u{103a}ကျင\u{103a}ဘက\u{103a}ပြ မြားများ",
"ဘယ\u{103a}ရစ\u{103a} မြားများ ခလ\u{102f}တ\u{103a}",
"မြား",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("piler mot klokken"),
keywords: &["mot klokken", "piler", "piler mot klokken"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("घडीको दिशाविपरीततिर वाणहर\u{942}को बटन"),
keywords: &[
"घडीको दिशा तिर",
"घडीको दिशा विपरीत तिर",
"घडीको दिशाविपरीततिर वाणहर\u{942}को बटन",
"वाण",
"विथरसिन\u{94d}न",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("pijlen tegen de klok in"),
keywords: &[
"achteruit",
"in tegengestelde richting",
"pijl",
"pijlen tegen de klok in",
"tegen de klok in",
],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("piler mot klokka"),
keywords: &["mot klokka", "piler", "piler mot klokka"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some(
"ଘଣ\u{b4d}ଟ\u{b3e}କଣ\u{b4d}ଟ\u{b3e}ର ଘ\u{b42}ର\u{b3f}ବ\u{b3e}ର ବ\u{b3f}ପରୀତ ଦ\u{b3f}ଗରେ ତୀର ବଟନ\u{b4d}\u{200c}",
),
keywords: &[
"ଘଣ\u{b4d}ଟ\u{b3e}କଣ\u{b4d}ଟ\u{b3e} ଘ\u{b42}ର\u{b3f}ବ\u{b3e}ର ବ\u{b3f}ପରୀତ ଦ\u{b3f}ଗରେ",
"ଘଣ\u{b4d}ଟ\u{b3e}କଣ\u{b4d}ଟ\u{b3e}ର ଘ\u{b42}ର\u{b3f}ବ\u{b3e}ର ବ\u{b3f}ପରୀତ ଦ\u{b3f}ଗରେ ତୀର ବଟନ\u{b4d}\u{200c}",
"ଘଣ\u{b4d}ଟ\u{b3e}କଣ\u{b4d}ଟ\u{b3e}ର ବ\u{b3f}ପରୀତ ଦ\u{b3f}ଗରେ",
"ତୀର",
"ୱ\u{b3f}ଦରଶ\u{b3f}ନ\u{b4d}ସ",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਘੜੀ ਦੀ ਉਲਟ ਦਿਸ\u{a3c}ਾ ਵਿ\u{a71}ਚ ਤੀਰ ਬਟਨ"),
keywords: &[
"ਘੜੀ ਦੀ ਉਲਟ ਦਿਸ\u{a3c}ਾ",
"ਘੜੀ ਦੀ ਉਲਟ ਦਿਸ\u{a3c}ਾ ਵਿ\u{a71}ਚ ਤੀਰ",
"ਘੜੀ ਦੀ ਉਲਟ ਦਿਸ\u{a3c}ਾ ਵਿ\u{a71}ਚ ਤੀਰ ਬਟਨ",
"ਤੀਰ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("مخالف گھڑی وار تیراں دا بٹن"),
keywords: &[
"تیر",
"خلاف\u{650} آفتاب",
"مخالف گھڑی وار",
"مخالف گھڑی وار تیراں دا بٹن",
],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("Aro-dẹm Nọ\u{301} De Múv Klọ\u{301}k Hánd Wè"),
keywords: &[
"Aro-dẹm Nọ\u{301} De Múv Klọ\u{301}k Hánd Dairẹ\u{301}kshọn",
"Aro-dẹm Nọ\u{301} De Múv Klọ\u{301}k Hánd Wè",
"Bọ\u{301}tin Ópózít To Sọn Dairẹ\u{301}kshọn",
"Nọ\u{301} Bi Klọ\u{301}k Hánd Dairẹ\u{301}kshọn Áro",
],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("przycisk ze strzałkami przeciwnie do ruchu wskazówek zegara"),
keywords: &[
"dookoła",
"okrężny",
"przycisk ze strzałkami przeciwnie do ruchu wskazówek zegara",
"przycisk ze strzałkami w kierunku przeciwnym do ruchu wskazówek zegara",
"strzałka",
],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("خلاف کلاک وايز غشي بټن"),
keywords: &[
"خلاف کلاک وايز",
"خلاف کلاک وايز غشي بټن",
"غشي",
"ويدر شاين",
],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("botão de setas em sentido anti-horário"),
keywords: &[
"anti-horário",
"botão de setas em sentido anti-horário",
"botão de setas no sentido anti-horário",
"sentido anti-horário",
"seta",
],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("setas no sentido contrário ao dos ponteiros do relógio"),
keywords: &[
"sentido contrário ao dos ponteiros do relógio",
"seta",
"setas no sentido contrário ao dos ponteiros do relógio",
],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("buton cu săgeți în sens antiorar"),
keywords: &[
"antiorar",
"buton cu săgeți în sens antiorar",
"invers",
"sens invers acelor de ceasornic",
],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-704"),
keywords: &["E10-704"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("против часовой стрелки"),
keywords: &[
"горизонтальные стрелки",
"замкнутые горизонтальные стрелки",
"замкнутые стрелки",
"против часовой стрелки",
"стрелки",
],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("utwambi duhagaritse tutareba mu cyerekezo cy’inshinge z’isaha"),
keywords: &[
"mu cyerekezo kinyuranye n’icy’inshinge z’isaha",
"mu cyerekezo kinyuranye n’icy’izuba",
"mu cyerekezo kitari icy’inshinge z’isaha",
"umwambi",
"utwambi duhagaritse tutareba mu cyerekezo cy’inshinge z’isaha",
],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("ابتي ڪلاڪ وانگي تيري بٽڻ"),
keywords: &[
"ابتي ڪلاڪ وانگي تيري بٽڻ",
"اينٽي ڪلاڪ وايئز",
"تير",
"مخالف رخ",
"ڪائونٽر ڪلاڪ وائيز",
],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("ව\u{dcf}ම\u{dcf}වර\u{dca}ත ඊතල බොත\u{dca}තම"),
keywords: &[
"ඊතලය",
"දක\u{dca}ෂ\u{dd2}ණ\u{dcf}වර\u{dca}ත දෙසට ව\u{dd2}ර\u{dd4}ද\u{dca}ධව",
"ව\u{dcf}ම\u{dcf}වර\u{dca}ත",
"ව\u{dcf}ම\u{dcf}වර\u{dca}ත ඊතල බොත\u{dca}තම",
"ස\u{dd6}ර\u{dca}ය\u{dcf}ගමනයට ව\u{dd2}ර\u{dd4}ද\u{dca}ධව",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("tlačidlo so šípkami proti smeru hodinových ručičiek"),
keywords: &[
"proti smeru hodinových ručičiek",
"tlačidlo so šípkami proti smeru hodinových ručičiek",
"šípka",
],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("gumb s puščicama, ki se ukrivljata v obratni smeri urnega kazalca"),
keywords: &[
"gumb s puščicama, ki se ukrivljata v obratni smeri urinega kazalca",
"gumb s puščicama, ki se ukrivljata v obratni smeri urnega kazalca",
"nasprotna smer urinega kazalca",
"nazaj",
"puščica",
],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("badhanka fallaarada ee lid saacad-wareegga"),
keywords: &[
"badhanka fallaarada ee lid saacad-wareegga",
"fallaar",
"lid saacad-wareegga",
"saacad-wareegga gacan bidix ah",
],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("butoni me shigjetat antiorare"),
keywords: &[
"antiorar",
"butoni me shigjetat antiorare",
"kundër drejtimit të orës",
"kundërorar",
"shigjetë",
],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелице које се врте улево"),
keywords: &[
"стрeлицa",
"стрелице које се врте улево",
"супрoтнo oд кaзaљке нa сaту",
],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("стрелице које се врте улијево"),
keywords: &["стрелице које се врте улијево"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelice koje se vrte ulevo"),
keywords: &[
"strelica",
"strelice koje se vrte ulevo",
"suprotno od kazaljke na satu",
],
},
#[cfg(feature = "sr_Latn_BA")]
crate::Annotation {
lang: "sr_Latn_BA",
tts: Some("strelice koje se vrte ulijevo"),
keywords: &["strelice koje se vrte ulijevo"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("pilar moturs"),
keywords: &["moturs", "pilar"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("kitufe cha mishale ya kinyume saa"),
keywords: &[
"chakaa",
"kinyume saa",
"kitufe cha mishale ya kinyume saa",
"mshale",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("இடஞ\u{bcd}சுழி அம\u{bcd}புக\u{bcd}குறிகள\u{bcd} பொத\u{bcd}த\u{bbe}ன\u{bcd}"),
keywords: &[
"அம\u{bcd}புக\u{bcd}குறிகள\u{bcd}",
"இடஞ\u{bcd}சுழி அம\u{bcd}புக\u{bcd}குறிகள\u{bcd}",
"இடஞ\u{bcd}சுழி அம\u{bcd}புக\u{bcd}குறிகள\u{bcd} பொத\u{bcd}த\u{bbe}ன\u{bcd}",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("అపసవ\u{c4d}యద\u{c3f}శల\u{c4b} బ\u{c3e}ణ\u{c3e}ల బటన\u{c4d}"),
keywords: &[
"అపసవ\u{c4d}యద\u{c3f}శల\u{c4b} బ\u{c3e}ణ\u{c3e}ల బటన\u{c4d}",
"ర\u{c3f}ఫ\u{c4d}ర\u{c46}ష\u{c4d} చ\u{c3f}హ\u{c4d}నం",
"ర\u{c3f}ఫ\u{c4d}ర\u{c46}ష\u{c4d} చ\u{c47}య\u{c3f}",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("Тугмаи \"Тирҳои самти муқобили ҳаракати ақрабҳои соат\""),
keywords: &[
"Тугмаи \"Тирҳои самти муқобили ҳаракати ақрабҳои соат\"",
"муқобили самти ҳаракати ақрабҳои соат",
"муқобили ҳаракати офтоб",
"тир",
],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ล\u{e39}กศรทวนเข\u{e47}มนาฬ\u{e34}กา"),
keywords: &[
"ทวนเข\u{e47}มนาฬ\u{e34}กา",
"ล\u{e39}กศร",
"ล\u{e39}กศรทวนเข\u{e47}มนาฬ\u{e34}กา",
],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ኣንጻር ሰዓት ዝዘውር ትኹል ምልክት መልጎም"),
keywords: &[
"withershins",
"ምልክት",
"ኣንጻር ሰዓት",
"ኣንጻር ሰዓት ዝዘውር ትኹል ምልክት መልጎም",
],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("sagadyň tersine oklar düwmesi"),
keywords: &[
"ok",
"sagadyň garşysyna",
"sagadyň tersine",
"sagadyň tersine oklar düwmesi",
"yzygiderli oklar",
],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau vilovilo toʻohema"),
keywords: &["hema", "ngahau", "ngahau vilovilo toʻohema", "vilovilo"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("saat yönünün tersinde dönen oklar"),
keywords: &[
"oklar",
"saat yönünün tersinde dönen oklar",
"saat yönünün tersine",
"saat yönünün tersine dönen oklar",
],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("سائەتكە قارشى يۆنىلىشتىكى كۆرسەتكۈچ"),
keywords: &[
"سائەت يۆنىلىشىگە قارشى",
"سائەتكە قارشى",
"سائەتكە قارشى يۆنىلىشتىكى كۆرسەتكۈچ",
"قارشى",
"كۆرسەتكۈچ",
],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("кнопка стрілок проти ходу годинника"),
keywords: &[
"кнопка зі стрілками, напрямленими проти годинникової стрілки",
"кнопка стрілок проти ходу годинника",
"проти годинникової стрілки",
"стрілка",
],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("گھڑی کی مخالف سمت میں تیر بٹن"),
keywords: &[
"تیر",
"سائن",
"سمت",
"گھڑی کی مخالف سمت",
"گھڑی کی مخالف سمت میں تیر بٹن",
],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("soat millariga qarshi yo‘nalish"),
keywords: &[
"chiziq",
"soat millariga qarshi",
"soat millariga qarshi yo‘nalish",
],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("nút mũi tên ngược chiều kim đồng hồ"),
keywords: &[
"mũi tên",
"ngược chiều kim đồng",
"nút mũi tên ngược chiều kim đồng hồ",
],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("bitoŋu fett yu juuyoo te safaan-montar"),
keywords: &[
"bitoŋu fett yu juuyoo te safaan-montar",
"fett",
"juuyoo",
"safaan-montar",
"yoonal",
],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("iqhosha lamatolo elijikeleza ngasekunene"),
keywords: &[
"i-whithershins",
"iqhosha lamatolo elijikeleza ngasekunene",
"jikeleza ngasekhohlo",
"ngokujikeleza ngasekhohlo",
"utolo",
],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("bọtini awọn ọfa yiyi lodi sọna ago"),
keywords: &[
"bọtini awọn ọfa yiyi lodi sọna ago",
"o\u{329}fà",
"ìlòdìsọ\u{301}wọ\u{301} aago",
],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("逆時針箭咀按鈕"),
keywords: &["相反方向", "箭咀", "逆時針", "逆時針箭咀按鈕"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("逆时针箭咀按钮"),
keywords: &["相反方向", "箭咀", "逆时针", "逆时针箭咀按钮"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("逆时针箭头按钮"),
keywords: &["箭头", "逆时针", "逆时针箭头", "逆时针箭头按钮"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("逆時針"),
keywords: &["逆時針"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("逆時針箭嘴按鈕"),
keywords: &["逆時針", "逆時針箭嘴", "逆時針箭嘴按鈕"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("izinkinobho zemicibisholo ejikelezela kwesokunxele"),
keywords: &[
"izinkinobho zemicibisholo ejikelezela kwesokunxele",
"okujikelezela ngakwesokunxele",
"okuphambana nokuya ngakwesokudla",
"umcibisholo",
],
},
],
};
// Emoji metadata for U+1F519 "BACK arrow" (group: Symbols, subgroup: arrow).
// NOTE(review): this table looks auto-generated from Unicode CLDR annotation
// data — prefer regenerating over hand-editing individual entries.
#[doc = "🔙"]
pub const BACK_ARROW: crate::Emoji = crate::Emoji {
    glyph: "🔙",
    codepoint: "1F519",
    status: crate::Status::FullyQualified,
    introduction_version: 0.6f32,
    name: "BACK arrow",
    group: "Symbols",
    subgroup: "arrow",
    is_variant: false,
    variants: &[],
    // Per-locale TTS name and search keywords; each entry is compiled in only
    // when its language feature flag is enabled.
    annotations: &[
        #[cfg(feature = "af")]
        crate::Annotation {
            lang: "af",
            tts: Some("terugpyl"),
            keywords: &["pyl", "terug", "terugpyl"],
        },
        #[cfg(feature = "am")]
        crate::Annotation {
            lang: "am",
            tts: Some("ተመለስ ቀስት"),
            keywords: &["ቀስት", "ተመለስ"],
        },
        #[cfg(feature = "ar")]
        crate::Annotation {
            lang: "ar",
            tts: Some("سهم رجوع"),
            keywords: &["رجوع مع سهم أيسر", "سهم", "سهم أيسر", "سهم رجوع"],
        },
        #[cfg(feature = "as")]
        crate::Annotation {
            lang: "as",
            tts: Some("পশ\u{9cd}চ\u{9be}দম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}"),
            keywords: &[
                "ক\u{9be}\u{981}ড\u{9bc}",
                "পশ\u{9cd}চ\u{9be}দম\u{9c1}খী ক\u{9be}\u{981}ড\u{9bc}",
                "পিছলৈ",
            ],
        },
        #[cfg(feature = "az")]
        crate::Annotation {
            lang: "az",
            tts: Some("geriyə yönəlmiş ox"),
            keywords: &["arxa", "geriyə yönəlmiş ox", "ox"],
        },
        #[cfg(feature = "be")]
        crate::Annotation {
            lang: "be",
            tts: Some("стрэлка «назад»"),
            keywords: &["назад", "стрэлка", "стрэлка «назад»"],
        },
        #[cfg(feature = "bg")]
        crate::Annotation {
            lang: "bg",
            tts: Some("стрелка с back"),
            keywords: &["back", "стрелка", "стрелка с back"],
        },
        #[cfg(feature = "bn")]
        crate::Annotation {
            lang: "bn",
            tts: Some("পিছনে তীর"),
            keywords: &["তীর", "পিছনে"],
        },
        #[cfg(feature = "bs")]
        crate::Annotation {
            lang: "bs",
            tts: Some("strelica za nazad"),
            keywords: &["nazad", "strelica", "strelica za nazad"],
        },
        #[cfg(feature = "ca")]
        crate::Annotation {
            lang: "ca",
            tts: Some("fletxa amb la paraula Back"),
            keywords: &["Back", "fletxa", "fletxa amb la paraula Back", "paraula"],
        },
        #[cfg(feature = "chr")]
        crate::Annotation {
            lang: "chr",
            tts: Some("ᎣᏂᏗᏢ ᎦᏝᏗ"),
            keywords: &["ᎣᏂᏗᏝ", "ᎣᏂᏗᏢ ᎦᏝᏗ", "ᎦᏝᏗ"],
        },
        #[cfg(feature = "cs")]
        crate::Annotation {
            lang: "cs",
            tts: Some("šipka zpět"),
            keywords: &["back", "zpět", "šipka"],
        },
        #[cfg(feature = "cy")]
        crate::Annotation {
            lang: "cy",
            tts: Some("saeth BACK"),
            keywords: &["saeth", "saeth BACK", "yn ôl"],
        },
        #[cfg(feature = "da")]
        crate::Annotation {
            lang: "da",
            tts: Some("BACK-pil"),
            keywords: &["BACK-pil", "tilbage-pil", "venstrepil"],
        },
        #[cfg(feature = "de")]
        crate::Annotation {
            lang: "de",
            tts: Some("BACK-Pfeil"),
            keywords: &["BACK-Pfeil", "Pfeil", "links", "zurück"],
        },
        #[cfg(feature = "el")]
        crate::Annotation {
            lang: "el",
            tts: Some("βέλος BACK"),
            keywords: &["βέλος", "βέλος BACK", "πίσω"],
        },
        #[cfg(feature = "en")]
        crate::Annotation {
            lang: "en",
            tts: Some("BACK arrow"),
            keywords: &["BACK arrow", "arrow", "back"],
        },
        // "↑↑↑" is CLDR's inheritance marker: the locale defers to its parent
        // locale's annotation rather than supplying its own text.
        #[cfg(feature = "en_AU")]
        crate::Annotation {
            lang: "en_AU",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_CA")]
        crate::Annotation {
            lang: "en_CA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_GB")]
        crate::Annotation {
            lang: "en_GB",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_IN")]
        crate::Annotation {
            lang: "en_IN",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es")]
        crate::Annotation {
            lang: "es",
            tts: Some("flecha BACK"),
            keywords: &[
                "atrás",
                "atrás con flecha izquierda",
                "back",
                "flecha",
                "flecha BACK",
                "flecha a la izquierda",
            ],
        },
        #[cfg(feature = "es_419")]
        crate::Annotation {
            lang: "es_419",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_MX")]
        crate::Annotation {
            lang: "es_MX",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_US")]
        crate::Annotation {
            lang: "es_US",
            tts: Some("↑↑↑"),
            keywords: &["atrás", "flecha", "flecha BACK"],
        },
        #[cfg(feature = "et")]
        crate::Annotation {
            lang: "et",
            tts: Some("nool BACK"),
            keywords: &["nool", "nool BACK", "tagasi"],
        },
        #[cfg(feature = "eu")]
        crate::Annotation {
            lang: "eu",
            tts: Some("atzera gezia"),
            keywords: &["atzera", "atzera gezia", "gezi"],
        },
        #[cfg(feature = "fa")]
        crate::Annotation {
            lang: "fa",
            tts: Some("پیکان عقب"),
            keywords: &["برگشت", "پیکان", "پیکان عقب"],
        },
        #[cfg(feature = "fi")]
        crate::Annotation {
            lang: "fi",
            tts: Some("BACK-nuoli"),
            keywords: &["BACK-nuoli", "nuoli", "takaisin"],
        },
        #[cfg(feature = "fil")]
        crate::Annotation {
            lang: "fil",
            tts: Some("back arrow"),
            keywords: &["PABALIK", "arrow", "back arrow"],
        },
        #[cfg(feature = "fo")]
        crate::Annotation {
            lang: "fo",
            tts: Some("BACK pílur"),
            keywords: &["BACK pílur", "back", "pílur"],
        },
        #[cfg(feature = "fr")]
        crate::Annotation {
            lang: "fr",
            tts: Some("flèche Retour"),
            keywords: &["flèche", "flèche Retour"],
        },
        #[cfg(feature = "fr_CA")]
        crate::Annotation {
            lang: "fr_CA",
            tts: Some("étiquette «\u{a0}BACK\u{a0}» et flèche"),
            keywords: &[
                "back",
                "flèche",
                "retour",
                "étiquette «\u{a0}BACK\u{a0}» et flèche",
            ],
        },
        #[cfg(feature = "ga")]
        crate::Annotation {
            lang: "ga",
            tts: Some("saighead SIAR"),
            keywords: &["saighead SIAR", "siar agus saighead ar chlé"],
        },
        #[cfg(feature = "gd")]
        crate::Annotation {
            lang: "gd",
            tts: Some("saighead “BACK”"),
            keywords: &["air ais", "saighead", "saighead “BACK”"],
        },
        #[cfg(feature = "gl")]
        crate::Annotation {
            lang: "gl",
            tts: Some("frecha coa mensaxe \"volver\" en inglés"),
            keywords: &[
                "atrás",
                "back",
                "frecha",
                "frecha coa mensaxe \"volver\" en inglés",
                "volver",
            ],
        },
        #[cfg(feature = "gu")]
        crate::Annotation {
            lang: "gu",
            tts: Some("પાછળ તીર"),
            keywords: &[
                "ડાબા તીર સાથ\u{ac7} પાછળ",
                "ડાબ\u{ac1}\u{a82} તીર",
                "તીર",
                "પાછળ",
            ],
        },
        #[cfg(feature = "ha")]
        crate::Annotation {
            lang: "ha",
            tts: Some("Kibiya Ta BAYA"),
            keywords: &["Kibiya Ta BAYA", "baya", "kibiya"],
        },
        #[cfg(feature = "he")]
        crate::Annotation {
            lang: "he",
            tts: Some("חץ אחורה"),
            keywords: &["אחורה", "חזרה", "חץ"],
        },
        #[cfg(feature = "hi")]
        crate::Annotation {
            lang: "hi",
            tts: Some("वापस तीर"),
            keywords: &[
                "तीर",
                "बाए\u{901} तीर क\u{947} साथ वापस",
                "बाया\u{901} तीर",
                "वापस",
            ],
        },
        #[cfg(feature = "hr")]
        crate::Annotation {
            lang: "hr",
            tts: Some("strelica s natpisom \"back\""),
            keywords: &["natrag", "strelica", "strelica s natpisom \"back\""],
        },
        #[cfg(feature = "hu")]
        crate::Annotation {
            lang: "hu",
            tts: Some("vissza nyíl"),
            keywords: &["nyíl", "vissza nyíl", "visszafelé"],
        },
        #[cfg(feature = "hy")]
        crate::Annotation {
            lang: "hy",
            tts: Some("հետ գրությամբ սլաք"),
            keywords: &["հետ", "հետ գրությամբ սլաք", "սլաք"],
        },
        #[cfg(feature = "id")]
        crate::Annotation {
            lang: "id",
            tts: Some("tanda panah BACK"),
            keywords: &["kembali", "panah", "tanda panah BACK"],
        },
        #[cfg(feature = "ig")]
        crate::Annotation {
            lang: "ig",
            tts: Some("UBE àzụ"),
            keywords: &["UBE àzụ", "ube", "àzụ"],
        },
        #[cfg(feature = "is")]
        crate::Annotation {
            lang: "is",
            tts: Some("til baka"),
            keywords: &[
                "til baka",
                "vinstri ör",
                "ör",
                "ör til baka",
                "ör til vinstri",
            ],
        },
        #[cfg(feature = "it")]
        crate::Annotation {
            lang: "it",
            tts: Some("freccia BACK"),
            keywords: &[
                "freccia",
                "freccia BACK",
                "freccia verso sinistra",
                "indietro",
                "indietro con freccia verso sinistra",
            ],
        },
        #[cfg(feature = "ja")]
        crate::Annotation {
            lang: "ja",
            tts: Some("BACK矢印"),
            keywords: &["BACK", "BACK矢印", "バック", "戻る", "矢印"],
        },
        #[cfg(feature = "jv")]
        crate::Annotation {
            lang: "jv",
            tts: Some("panah MBALIK"),
            keywords: &["mbalik", "panah", "panah MBALIK"],
        },
        #[cfg(feature = "ka")]
        crate::Annotation {
            lang: "ka",
            tts: Some("უკანსვლის ღილაკი"),
            keywords: &["ისარი", "უკან", "უკანსვლის ღილაკი"],
        },
        #[cfg(feature = "kab")]
        crate::Annotation {
            lang: "kab",
            tts: Some("Aneccab n tuɣalin"),
            keywords: &["Aneccab n tuɣalin"],
        },
        #[cfg(feature = "kk")]
        crate::Annotation {
            lang: "kk",
            tts: Some("«артқа» көрсеткісі"),
            keywords: &["«артқа» көрсеткісі", "артқа", "көрсеткі"],
        },
        #[cfg(feature = "kl")]
        crate::Annotation {
            lang: "kl",
            tts: Some("tilbage"),
            keywords: &["tilbage", "tilbage-pil", "venstrepil"],
        },
        #[cfg(feature = "km")]
        crate::Annotation {
            lang: "km",
            tts: Some(
                "ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេងមានអក\u{17d2}សរ Back នៅព\u{17b8}ក\u{17d2}រោម",
            ),
            keywords: &[
                "ថយក\u{17d2}រោយ",
                "ព\u{17d2}រ\u{17bd}ញ",
                "ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេងមានអក\u{17d2}សរ Back នៅព\u{17b8}ក\u{17d2}រោម",
                "ព\u{17d2}រ\u{17bd}ញអក\u{17d2}សរ BACK",
            ],
        },
        #[cfg(feature = "kn")]
        crate::Annotation {
            lang: "kn",
            tts: Some("ಹ\u{cbf}ಂದ\u{cbf}ನ ಬಾಣ"),
            keywords: &[
                "ಎಡ ಬಾಣದ ಗುರುತು",
                "ಎಡ ಬಾಣದೊಂದ\u{cbf}ಗ\u{cc6} ಹ\u{cbf}ಂದ\u{cc6}",
                "ಹ\u{cbf}ಂದ\u{cbf}ನ ಬಾಣ",
                "ಹ\u{cbf}ಂದ\u{cbf}ನ ಬಾಣದ ಗುರುತು",
                "ಹ\u{cbf}ಂದ\u{cc6}",
            ],
        },
        #[cfg(feature = "ko")]
        crate::Annotation {
            lang: "ko",
            tts: Some("뒤로"),
            keywords: &["뒤로", "왼쪽 화살표", "화살표"],
        },
        #[cfg(feature = "kok")]
        crate::Annotation {
            lang: "kok",
            tts: Some("फाटी\u{902} बाण"),
            keywords: &["फाटी\u{902}", "बाण"],
        },
        #[cfg(feature = "ky")]
        crate::Annotation {
            lang: "ky",
            tts: Some("артка жебеси"),
            keywords: &["артка", "артка жебеси", "жебе"],
        },
        #[cfg(feature = "lb")]
        crate::Annotation {
            lang: "lb",
            tts: Some("ZERÉCK-Feil"),
            keywords: &["Feil", "ZERÉCK-Feil", "zeréck"],
        },
        #[cfg(feature = "lo")]
        crate::Annotation {
            lang: "lo",
            tts: Some("ລ\u{eb9}ກສອນກ\u{eb1}ບ"),
            keywords: &["ກ\u{eb1}ບ", "ລ\u{eb9}ກສອນ", "ລ\u{eb9}ກສອນກ\u{eb1}ບ"],
        },
        #[cfg(feature = "lt")]
        crate::Annotation {
            lang: "lt",
            tts: Some("rodyklė su užrašu „Back“"),
            keywords: &["atgal", "rodyklė", "rodyklė su užrašu „Back“"],
        },
        #[cfg(feature = "lv")]
        crate::Annotation {
            lang: "lv",
            tts: Some("bultiņa ar uzrakstu Back"),
            keywords: &["atpakaļ", "bultiņa", "bultiņa ar uzrakstu Back"],
        },
        #[cfg(feature = "mi")]
        crate::Annotation {
            lang: "mi",
            tts: Some("pere HOKI"),
            keywords: &["hoki", "pere", "pere HOKI"],
        },
        #[cfg(feature = "mk")]
        crate::Annotation {
            lang: "mk",
            tts: Some("стрелка за назад"),
            keywords: &["назад", "стрелка", "стрелка за назад"],
        },
        #[cfg(feature = "ml")]
        crate::Annotation {
            lang: "ml",
            tts: Some("പ\u{d41}റകിലേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം"),
            keywords: &[
                "അമ\u{d4d}പടയ\u{d3e}ളം",
                "ഇടത\u{d4d}തോട\u{d4d}ട\u{d41}ള\u{d4d}ള ബ\u{d3e}ക\u{d4d}ക\u{d4d} ബട\u{d4d}ടൺ",
                "പ\u{d41}റകിലേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
                "ബട\u{d4d}ടൺ",
                "ബ\u{d3e}ക\u{d4d}ക\u{d4d}",
                "മടങ\u{d4d}ങ\u{d41}ക",
            ],
        },
        #[cfg(feature = "mn")]
        crate::Annotation {
            lang: "mn",
            tts: Some("БУЦАХ сум"),
            keywords: &["БУЦАХ сум", "буцах", "сум"],
        },
        #[cfg(feature = "mr")]
        crate::Annotation {
            lang: "mr",
            tts: Some("परत बाण"),
            keywords: &["परत", "बाण"],
        },
        #[cfg(feature = "ms")]
        crate::Annotation {
            lang: "ms",
            tts: Some("anak panah ‘BACK’"),
            keywords: &["anak panah", "anak panah ‘BACK’", "kembali"],
        },
        #[cfg(feature = "mt")]
        crate::Annotation {
            lang: "mt",
            tts: Some("vleġġa BACK"),
            keywords: &["lura", "vleġġa", "vleġġa BACK"],
        },
        #[cfg(feature = "my")]
        crate::Annotation {
            lang: "my",
            tts: Some("နောက\u{103a}သ\u{102d}\u{102f}\u{1037} မြား"),
            keywords: &[
                "နောက\u{103a}သ\u{102d}\u{102f}\u{1037}",
                "နောက\u{103a}သ\u{102d}\u{102f}\u{1037}ပြ မြား သင\u{103a}\u{1039}ကေတ",
                "မြား",
            ],
        },
        #[cfg(feature = "nb")]
        crate::Annotation {
            lang: "nb",
            tts: Some("BACK-pil"),
            keywords: &[
                "BACK-pil",
                "pil",
                "pil til venstre",
                "tilbake",
                "tilbakepil",
            ],
        },
        #[cfg(feature = "ne")]
        crate::Annotation {
            lang: "ne",
            tts: Some("पछाडि स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण"),
            keywords: &[
                "पछाडि जान\u{947}",
                "पछाडि स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण",
                "वाण",
            ],
        },
        #[cfg(feature = "nl")]
        crate::Annotation {
            lang: "nl",
            tts: Some("BACK-pijl"),
            keywords: &["BACK-pijl", "pijl", "terug"],
        },
        #[cfg(feature = "nn")]
        crate::Annotation {
            lang: "nn",
            tts: Some("BACK-pil"),
            keywords: &[
                "BACK-pil",
                "pil",
                "pil til venstre",
                "tilbake",
                "tilbakepil",
            ],
        },
        #[cfg(feature = "or")]
        crate::Annotation {
            lang: "or",
            tts: Some("ପଛ ତୀର"),
            keywords: &["ତୀର", "ପଛ"],
        },
        #[cfg(feature = "pa")]
        crate::Annotation {
            lang: "pa",
            tts: Some("ਪਿ\u{a71}ਛ\u{a47} ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ"),
            keywords: &[
                "ਖ\u{a71}ਬਾ ਤੀਰ",
                "ਖ\u{a71}ਬ\u{a47} ਤੀਰ ਨਾਲ ਪਿ\u{a71}ਛ\u{a47}",
                "ਤੀਰ",
                "ਪਿ\u{a71}ਛ\u{a47}",
                "ਪਿ\u{a71}ਛ\u{a47} ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ",
            ],
        },
        #[cfg(feature = "pa_Arab")]
        crate::Annotation {
            lang: "pa_Arab",
            tts: Some("پیچھے دا تیر"),
            keywords: &["تیر", "پیچھے", "پیچھے دا تیر"],
        },
        #[cfg(feature = "pcm")]
        crate::Annotation {
            lang: "pcm",
            tts: Some("BACK Áro"),
            keywords: &["BACK Áro", "Bak", "Áro"],
        },
        #[cfg(feature = "pl")]
        crate::Annotation {
            lang: "pl",
            tts: Some("strzałka z napisem BACK"),
            keywords: &["strzałka", "strzałka z napisem BACK", "wstecz"],
        },
        #[cfg(feature = "ps")]
        crate::Annotation {
            lang: "ps",
            tts: Some("شاته غشی"),
            keywords: &["شاته", "غشی"],
        },
        #[cfg(feature = "pt")]
        crate::Annotation {
            lang: "pt",
            tts: Some("seta \"BACK\""),
            keywords: &["seta", "seta \"BACK\"", "seta para a esquerda", "voltar"],
        },
        #[cfg(feature = "pt_PT")]
        crate::Annotation {
            lang: "pt_PT",
            tts: Some("seta BACK"),
            keywords: &["para trás", "seta", "seta BACK"],
        },
        #[cfg(feature = "qu")]
        crate::Annotation {
            lang: "qu",
            tts: Some("QHIPA wachʼi"),
            keywords: &["QHIPA wachʼi"],
        },
        #[cfg(feature = "ro")]
        crate::Annotation {
            lang: "ro",
            tts: Some("săgeată cu textul BACK"),
            keywords: &["săgeată", "săgeată cu textul BACK", "înapoi"],
        },
        // "root" carries the CLDR short-name code (E10-725) used when no
        // locale-specific annotation is available.
        #[cfg(feature = "root")]
        crate::Annotation {
            lang: "root",
            tts: Some("E10-725"),
            keywords: &["E10-725"],
        },
        #[cfg(feature = "ru")]
        crate::Annotation {
            lang: "ru",
            tts: Some("стрелка «назад»"),
            keywords: &["назад", "стрелка", "стрелка «назад»"],
        },
        #[cfg(feature = "rw")]
        crate::Annotation {
            lang: "rw",
            tts: Some("akambi ka BACK"),
            keywords: &["akambi ka BACK", "subira inyuma", "umwambi"],
        },
        #[cfg(feature = "sd")]
        crate::Annotation {
            lang: "sd",
            tts: Some("پٺيان وارو تير"),
            keywords: &["تير", "پٺيان", "پٺيان وارو تير"],
        },
        #[cfg(feature = "si")]
        crate::Annotation {
            lang: "si",
            tts: Some("පස\u{dd4} ඊතලය"),
            keywords: &["ඊතලය", "පස\u{dd4} ඊතලය", "පස\u{dd4}පසට"],
        },
        #[cfg(feature = "sk")]
        crate::Annotation {
            lang: "sk",
            tts: Some("šípka BACK"),
            keywords: &["späť", "šípka", "šípka BACK"],
        },
        #[cfg(feature = "sl")]
        crate::Annotation {
            lang: "sl",
            tts: Some("puščica nazaj"),
            keywords: &["nazaj", "puščica"],
        },
        #[cfg(feature = "so")]
        crate::Annotation {
            lang: "so",
            tts: Some("fallaarta GADAAL"),
            keywords: &["falaar", "fallaarta GADAAL", "gadaal"],
        },
        #[cfg(feature = "sq")]
        crate::Annotation {
            lang: "sq",
            tts: Some("shigjetë prapa"),
            keywords: &["prapa", "shigjetë"],
        },
        #[cfg(feature = "sr")]
        crate::Annotation {
            lang: "sr",
            tts: Some("стрелица за повратак"),
            keywords: &["назад", "стрeлицa", "стрелица за повратак"],
        },
        #[cfg(feature = "sr_Cyrl_BA")]
        crate::Annotation {
            lang: "sr_Cyrl_BA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "sr_Latn")]
        crate::Annotation {
            lang: "sr_Latn",
            tts: Some("strelica za povratak"),
            keywords: &["nazad", "strelica", "strelica za povratak"],
        },
        #[cfg(feature = "sv")]
        crate::Annotation {
            lang: "sv",
            tts: Some("Back med vänsterpil"),
            keywords: &["Back med vänsterpil", "bakåt", "vänsterpil"],
        },
        #[cfg(feature = "sw")]
        crate::Annotation {
            lang: "sw",
            tts: Some("mshale wa nyuma"),
            keywords: &["mshale", "mshale wa nyuma", "nyuma"],
        },
        #[cfg(feature = "sw_KE")]
        crate::Annotation {
            lang: "sw_KE",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "ta")]
        crate::Annotation {
            lang: "ta",
            tts: Some("பின\u{bcd}ன\u{bbe}ல\u{bcd} செல\u{bcd}"),
            keywords: &[
                "திரும\u{bcd}பிப\u{bcd} போ",
                "பின\u{bcd}செல\u{bcd}லும\u{bcd} அம\u{bcd}புக\u{bcd}குறி",
                "பின\u{bcd}ன\u{bbe}ல\u{bcd} செல\u{bcd}",
            ],
        },
        #[cfg(feature = "te")]
        crate::Annotation {
            lang: "te",
            tts: Some("వ\u{c46}నుకకు బ\u{c3e}ణం"),
            keywords: &["బ\u{c3e}ణం", "వ\u{c46}నుకకు"],
        },
        #[cfg(feature = "tg")]
        crate::Annotation {
            lang: "tg",
            tts: Some("тири \"бозгашт\""),
            keywords: &["тир", "тири \"бозгашт\"", "қафо"],
        },
        #[cfg(feature = "th")]
        crate::Annotation {
            lang: "th",
            tts: Some("กล\u{e31}บ"),
            keywords: &["กล\u{e31}บ", "ล\u{e39}กศร", "ล\u{e39}กศรกล\u{e31}บ"],
        },
        #[cfg(feature = "ti")]
        crate::Annotation {
            lang: "ti",
            tts: Some("ዳሕረዋይ ምልክት"),
            keywords: &["ምልክት", "ዳሕረዋይ ምልክት", "ድሕሪት"],
        },
        #[cfg(feature = "tk")]
        crate::Annotation {
            lang: "tk",
            tts: Some("BACK oky"),
            keywords: &["BACK oky", "ok", "yza"],
        },
        #[cfg(feature = "to")]
        crate::Annotation {
            lang: "to",
            tts: Some("ngahau FOKI"),
            keywords: &["FOKI", "ngahau"],
        },
        #[cfg(feature = "tr")]
        crate::Annotation {
            lang: "tr",
            tts: Some("geri oku"),
            keywords: &["geri oku", "ok", "sol geri ok", "sol ok"],
        },
        #[cfg(feature = "ug")]
        crate::Annotation {
            lang: "ug",
            tts: Some("ئارقا كۆرسەتكۈچ"),
            keywords: &["ئارقا", "كۆرسەتكۈچ"],
        },
        #[cfg(feature = "uk")]
        crate::Annotation {
            lang: "uk",
            tts: Some("стрілка назад [BACK]"),
            keywords: &[
                "назад",
                "стрілка",
                "стрілка BACK",
                "стрілка назад BACK",
                "стрілка назад [BACK]",
            ],
        },
        #[cfg(feature = "ur")]
        crate::Annotation {
            lang: "ur",
            tts: Some("واپسی تیر"),
            keywords: &["تیر", "واپسی تیر", "پیچھے"],
        },
        #[cfg(feature = "uz")]
        crate::Annotation {
            lang: "uz",
            tts: Some("orqaga yo‘nalish"),
            keywords: &["chiziq", "orqaga", "orqaga yo‘nalish"],
        },
        #[cfg(feature = "vi")]
        crate::Annotation {
            lang: "vi",
            tts: Some("mũi tên BACK"),
            keywords: &["back", "mũi tên", "mũi tên BACK"],
        },
        #[cfg(feature = "wo")]
        crate::Annotation {
            lang: "wo",
            tts: Some("fettu DELLU"),
            keywords: &["fett", "fettu DELLU", "montar"],
        },
        #[cfg(feature = "xh")]
        crate::Annotation {
            lang: "xh",
            tts: Some("utolo oluthi EMVA"),
            keywords: &["emva", "utolo", "utolo oluthi EMVA"],
        },
        #[cfg(feature = "yo")]
        crate::Annotation {
            lang: "yo",
            tts: Some("Ọfa ẸYIN"),
            keywords: &["o\u{329}fà", "ẹ\u{300}yìn", "Ọfa ẸYIN"],
        },
        #[cfg(feature = "yue")]
        crate::Annotation {
            lang: "yue",
            tts: Some("返回箭咀"),
            keywords: &["箭咀", "返回", "返回箭咀"],
        },
        #[cfg(feature = "yue_Hans")]
        crate::Annotation {
            lang: "yue_Hans",
            tts: Some("返回箭咀"),
            keywords: &["箭咀", "返回", "返回箭咀"],
        },
        #[cfg(feature = "zh")]
        crate::Annotation {
            lang: "zh",
            tts: Some("返回箭头"),
            keywords: &["回退", "箭头", "返回", "返回箭头"],
        },
        #[cfg(feature = "zh_Hant")]
        crate::Annotation {
            lang: "zh_Hant",
            tts: Some("返回"),
            keywords: &["返回"],
        },
        #[cfg(feature = "zh_Hant_HK")]
        crate::Annotation {
            lang: "zh_Hant_HK",
            tts: Some("後退箭嘴"),
            keywords: &["向後", "向後箭嘴", "後退", "後退箭嘴"],
        },
        #[cfg(feature = "zu")]
        crate::Annotation {
            lang: "zu",
            tts: Some("umcibisholo oya emuva"),
            keywords: &["emuva", "umcibisholo", "umcibisholo oya emuva"],
        },
    ],
};
// Emoji metadata for U+1F51A "END arrow" (group: Symbols, subgroup: arrow).
// NOTE(review): this table looks auto-generated from Unicode CLDR annotation
// data — prefer regenerating over hand-editing individual entries.
#[doc = "🔚"]
pub const END_ARROW: crate::Emoji = crate::Emoji {
    glyph: "🔚",
    codepoint: "1F51A",
    status: crate::Status::FullyQualified,
    introduction_version: 0.6f32,
    name: "END arrow",
    group: "Symbols",
    subgroup: "arrow",
    is_variant: false,
    variants: &[],
    // Per-locale TTS name and search keywords; each entry is compiled in only
    // when its language feature flag is enabled.
    annotations: &[
        #[cfg(feature = "af")]
        crate::Annotation {
            lang: "af",
            tts: Some("eindpyl"),
            keywords: &["einde", "eindpyl", "pyl"],
        },
        #[cfg(feature = "am")]
        crate::Annotation {
            lang: "am",
            tts: Some("ማብቂያ ቀስት"),
            keywords: &["መጨረሻ", "ማብቂያ ቀስት", "ቀስት"],
        },
        #[cfg(feature = "ar")]
        crate::Annotation {
            lang: "ar",
            tts: Some("سهم النهاية"),
            keywords: &[
                "انتهاء مع سهم أيسر",
                "سهم",
                "سهم أيسر",
                "سهم النهاية",
                "سهم انتهاء",
            ],
        },
        #[cfg(feature = "as")]
        crate::Annotation {
            lang: "as",
            tts: Some("সম\u{9be}প\u{9cd}তি ক\u{9be}\u{981}ড\u{9bc}"),
            keywords: &[
                "ক\u{9be}\u{981}ড\u{9bc}",
                "সম\u{9be}প\u{9cd}ত",
                "সম\u{9be}প\u{9cd}তি ক\u{9be}\u{981}ড\u{9bc}",
            ],
        },
        #[cfg(feature = "az")]
        crate::Annotation {
            lang: "az",
            tts: Some("sona yönəlmiş ox"),
            keywords: &["ox", "son", "sona yönəlmiş ox"],
        },
        #[cfg(feature = "be")]
        crate::Annotation {
            lang: "be",
            tts: Some("стрэлка «канец»"),
            keywords: &["канец", "стрэлка", "стрэлка «канец»"],
        },
        #[cfg(feature = "bg")]
        crate::Annotation {
            lang: "bg",
            tts: Some("стрелка с end"),
            keywords: &["end", "стрелка", "стрелка с end"],
        },
        #[cfg(feature = "bn")]
        crate::Annotation {
            lang: "bn",
            tts: Some("শেষের তীর"),
            keywords: &["তীর", "শেষ", "শেষের তীর"],
        },
        #[cfg(feature = "bs")]
        crate::Annotation {
            lang: "bs",
            tts: Some("strelica za kraj"),
            keywords: &["kraj", "strelica", "strelica za kraj"],
        },
        #[cfg(feature = "ca")]
        crate::Annotation {
            lang: "ca",
            tts: Some("fletxa amb la paraula End"),
            keywords: &["End", "fletxa", "fletxa amb la paraula End", "paraula"],
        },
        #[cfg(feature = "chr")]
        crate::Annotation {
            lang: "chr",
            tts: Some("ᏭᎵᏍᏆᏛ ᎦᏝᏗ"),
            keywords: &["ᎦᏝᏗ", "ᏭᎵᏍᏆᏛ ᎦᏝᏗ", "ᏭᎵᏍᏛ"],
        },
        #[cfg(feature = "cs")]
        crate::Annotation {
            lang: "cs",
            tts: Some("šipka s nápisem END"),
            keywords: &["konec", "šipka", "šipka s nápisem END"],
        },
        #[cfg(feature = "cy")]
        crate::Annotation {
            lang: "cy",
            tts: Some("saeth END"),
            keywords: &["diwedd", "saeth", "saeth END"],
        },
        #[cfg(feature = "da")]
        crate::Annotation {
            lang: "da",
            tts: Some("END-pil"),
            keywords: &["END-pil", "end-pil", "venstrepil"],
        },
        #[cfg(feature = "de")]
        crate::Annotation {
            lang: "de",
            tts: Some("END-Pfeil"),
            keywords: &["END-Pfeil", "Pfeil", "links"],
        },
        #[cfg(feature = "el")]
        crate::Annotation {
            lang: "el",
            tts: Some("βέλος END"),
            keywords: &["βέλος", "βέλος END", "τέλος"],
        },
        #[cfg(feature = "en")]
        crate::Annotation {
            lang: "en",
            tts: Some("END arrow"),
            keywords: &["END arrow", "arrow", "end"],
        },
        // "↑↑↑" is CLDR's inheritance marker: the locale defers to its parent
        // locale's annotation rather than supplying its own text.
        #[cfg(feature = "en_AU")]
        crate::Annotation {
            lang: "en_AU",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_CA")]
        crate::Annotation {
            lang: "en_CA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_GB")]
        crate::Annotation {
            lang: "en_GB",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "en_IN")]
        crate::Annotation {
            lang: "en_IN",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es")]
        crate::Annotation {
            lang: "es",
            tts: Some("flecha END"),
            keywords: &[
                "final",
                "final con flecha izquierda",
                "flecha",
                "flecha END",
                "flecha a la izquierda",
            ],
        },
        #[cfg(feature = "es_419")]
        crate::Annotation {
            lang: "es_419",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_MX")]
        crate::Annotation {
            lang: "es_MX",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "es_US")]
        crate::Annotation {
            lang: "es_US",
            tts: Some("↑↑↑"),
            keywords: &["fin", "flecha", "flecha END"],
        },
        #[cfg(feature = "et")]
        crate::Annotation {
            lang: "et",
            tts: Some("nool END"),
            keywords: &["lõpp", "nool", "nool END"],
        },
        #[cfg(feature = "eu")]
        crate::Annotation {
            lang: "eu",
            tts: Some("amaiera gezia"),
            keywords: &["amaiera", "amaiera gezia", "gezi"],
        },
        #[cfg(feature = "fa")]
        crate::Annotation {
            lang: "fa",
            tts: Some("پیکان پایان"),
            keywords: &["پایان", "پیکان"],
        },
        #[cfg(feature = "fi")]
        crate::Annotation {
            lang: "fi",
            tts: Some("END-nuoli"),
            keywords: &["END-nuoli", "lopeta", "nuoli"],
        },
        #[cfg(feature = "fil")]
        crate::Annotation {
            lang: "fil",
            tts: Some("end arrow"),
            keywords: &["DULO", "arrow", "end arrow", "katapusan"],
        },
        #[cfg(feature = "fo")]
        crate::Annotation {
            lang: "fo",
            tts: Some("END pílur"),
            keywords: &["END pílur", "end", "pílur"],
        },
        #[cfg(feature = "fr")]
        crate::Annotation {
            lang: "fr",
            tts: Some("flèche Fin"),
            keywords: &["flèche", "flèche Fin"],
        },
        #[cfg(feature = "fr_CA")]
        crate::Annotation {
            lang: "fr_CA",
            tts: Some("étiquette « END » et flèche"),
            keywords: &["end", "fin", "flèche", "étiquette « END » et flèche"],
        },
        #[cfg(feature = "ga")]
        crate::Annotation {
            lang: "ga",
            tts: Some("saighead DEIRIDH"),
            keywords: &["deireadh agus saighead ar chlé", "saighead DEIRIDH"],
        },
        #[cfg(feature = "gd")]
        crate::Annotation {
            lang: "gd",
            tts: Some("saighead “END”"),
            keywords: &["deireadh", "saighead", "saighead “END”"],
        },
        #[cfg(feature = "gl")]
        crate::Annotation {
            lang: "gl",
            tts: Some("frecha coa mensaxe \"fin\" en inglés"),
            keywords: &[
                "end",
                "fin",
                "final",
                "frecha",
                "frecha coa mensaxe \"fin\" en inglés",
            ],
        },
        #[cfg(feature = "gu")]
        crate::Annotation {
            lang: "gu",
            tts: Some("સમાપ\u{acd}તિ તીર"),
            keywords: &[
                "ડાબા તીર સાથ\u{ac7} સમાપ\u{acd}તિ",
                "ડાબ\u{ac1}\u{a82} તીર",
                "તીર",
                "સમાપ\u{acd}તિ",
            ],
        },
        #[cfg(feature = "ha")]
        crate::Annotation {
            lang: "ha",
            tts: Some("Kibiya Ta ƘARSHE"),
            keywords: &["Kibiya Ta ƘARSHE", "kibiya", "ƙarshe"],
        },
        #[cfg(feature = "he")]
        crate::Annotation {
            lang: "he",
            tts: Some("חץ לסוף"),
            keywords: &["חץ", "חץ לסוף", "סוף"],
        },
        #[cfg(feature = "hi")]
        crate::Annotation {
            lang: "hi",
            tts: Some("END क\u{947} साथ तीर"),
            keywords: &[
                "END क\u{947} साथ तीर",
                "तीर",
                "बाए\u{901} तीर क\u{947} साथ समाप\u{94d}ति",
                "बाया\u{901} तीर",
                "समाप\u{94d}ति",
            ],
        },
        #[cfg(feature = "hr")]
        crate::Annotation {
            lang: "hr",
            tts: Some("strelica s natpisom \"end\""),
            keywords: &["kraj", "strelica", "strelica s natpisom \"end\""],
        },
        #[cfg(feature = "hu")]
        crate::Annotation {
            lang: "hu",
            tts: Some("vége nyíl"),
            keywords: &["nyíl", "vége"],
        },
        #[cfg(feature = "hy")]
        crate::Annotation {
            lang: "hy",
            tts: Some("ՎԵՐՋ սլաք"),
            keywords: &["ՎԵՐՋ սլաք", "սլաք", "վերջ", "վերջ գրությամբ սլաք"],
        },
        #[cfg(feature = "id")]
        crate::Annotation {
            lang: "id",
            tts: Some("tanda panah END"),
            keywords: &["berakhir", "panah", "tanda panah END"],
        },
        #[cfg(feature = "ig")]
        crate::Annotation {
            lang: "ig",
            tts: Some("UBE ngwụcha"),
            keywords: &["UBE ngwụcha", "ngwụcha", "ube"],
        },
        #[cfg(feature = "is")]
        crate::Annotation {
            lang: "is",
            tts: Some("endir"),
            keywords: &["endaör", "endaör til vinstri", "endir", "vinstri ör", "ör"],
        },
        #[cfg(feature = "it")]
        crate::Annotation {
            lang: "it",
            tts: Some("freccia END"),
            keywords: &[
                "end con freccia verso sinistra",
                "fine",
                "freccia",
                "freccia END",
                "freccia end",
                "freccia verso sinistra",
            ],
        },
        #[cfg(feature = "ja")]
        crate::Annotation {
            lang: "ja",
            tts: Some("END矢印"),
            keywords: &["END", "END矢印", "エンド", "矢印", "終わり"],
        },
        #[cfg(feature = "jv")]
        crate::Annotation {
            lang: "jv",
            tts: Some("panah MENTOK"),
            keywords: &["mentok", "panah", "panah MENTOK"],
        },
        #[cfg(feature = "ka")]
        crate::Annotation {
            lang: "ka",
            tts: Some("ბოლოში გადასვლის ისარი"),
            keywords: &["ბოლო", "ბოლოში გადასვლის ისარი", "ისარი"],
        },
        #[cfg(feature = "kab")]
        crate::Annotation {
            lang: "kab",
            tts: Some("Aneccab n tagara"),
            keywords: &["Aneccab n tagara"],
        },
        #[cfg(feature = "kk")]
        crate::Annotation {
            lang: "kk",
            tts: Some("«соңы» көрсеткісі"),
            keywords: &["«соңы» көрсеткісі", "көрсеткі", "соңына"],
        },
        #[cfg(feature = "kl")]
        crate::Annotation {
            lang: "kl",
            tts: Some("end"),
            keywords: &["end", "end-pil", "venstrepil"],
        },
        #[cfg(feature = "km")]
        crate::Annotation {
            lang: "km",
            tts: Some(
                "ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេងមានអក\u{17d2}សរ End នៅព\u{17b8}ក\u{17d2}រោម",
            ),
            keywords: &[
                "END",
                "ព\u{17d2}រ\u{17bd}ញ",
                "ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេងមានអក\u{17d2}សរ End នៅព\u{17b8}ក\u{17d2}រោម",
            ],
        },
        #[cfg(feature = "kn")]
        crate::Annotation {
            lang: "kn",
            tts: Some("ಅಂತ\u{cbf}ಮ ಬಾಣ"),
            keywords: &[
                "ಅಂತ\u{cbf}ಮ",
                "ಅಂತ\u{cbf}ಮ ಬಾಣದ ಗುರುತು",
                "ಎಡ ಬಾಣ",
                "ಎಡ ಬಾಣದೊಂದ\u{cbf}ಗ\u{cc6} ಅಂತ\u{cbf}ಮ",
                "ಬಾಣ",
            ],
        },
        #[cfg(feature = "ko")]
        crate::Annotation {
            lang: "ko",
            tts: Some("종료"),
            keywords: &["끝내기 화살표", "종료", "화살표"],
        },
        #[cfg(feature = "kok")]
        crate::Annotation {
            lang: "kok",
            tts: Some("समाप\u{94d}त बाण"),
            keywords: &["बाण", "समाप\u{94d}त"],
        },
        #[cfg(feature = "ky")]
        crate::Annotation {
            lang: "ky",
            tts: Some("аягы жебеси"),
            keywords: &["аягы", "аягы жебеси", "жебе"],
        },
        #[cfg(feature = "lb")]
        crate::Annotation {
            lang: "lb",
            tts: Some("ENN-Feil"),
            keywords: &["ENN-Feil", "Enn", "Feil"],
        },
        #[cfg(feature = "lo")]
        crate::Annotation {
            lang: "lo",
            tts: Some("ລ\u{eb9}ກສອນສ\u{eb4}\u{ec9}ນສ\u{eb8}ດ"),
            keywords: &[
                "ລ\u{eb9}ກສອນ",
                "ລ\u{eb9}ກສອນສ\u{eb4}\u{ec9}ນສ\u{eb8}ດ",
                "ສ\u{eb4}\u{ec9}ນສ\u{eb8}ດ",
            ],
        },
        #[cfg(feature = "lt")]
        crate::Annotation {
            lang: "lt",
            tts: Some("rodyklė su užrašu „End“"),
            keywords: &["pabaiga", "rodyklė", "rodyklė su užrašu „End“"],
        },
        #[cfg(feature = "lv")]
        crate::Annotation {
            lang: "lv",
            tts: Some("bultiņa ar uzrakstu End"),
            keywords: &["beigas", "beigt", "bultiņa", "bultiņa ar uzrakstu End"],
        },
        #[cfg(feature = "mi")]
        crate::Annotation {
            lang: "mi",
            tts: Some("pere MUTUNGA"),
            keywords: &["mutunga", "pere", "pere MUTUNGA"],
        },
        #[cfg(feature = "mk")]
        crate::Annotation {
            lang: "mk",
            tts: Some("стрелка за крај"),
            keywords: &["крај", "стрелка", "стрелка за крај"],
        },
        #[cfg(feature = "ml")]
        crate::Annotation {
            lang: "ml",
            tts: Some(
                "അവസ\u{d3e}നിപ\u{d4d}പിക\u{d4d}ക\u{d41}ന\u{d4d}നതിന\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
            ),
            keywords: &[
                "അമ\u{d4d}പടയ\u{d3e}ളം",
                "അവസ\u{d3e}നിപ\u{d4d}പിക\u{d4d}ക\u{d41}ക",
                "അവസ\u{d3e}നിപ\u{d4d}പിക\u{d4d}ക\u{d41}ന\u{d4d}നതിന\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
                "ഇടത\u{d4d}തോട\u{d4d}ട\u{d41}ള\u{d4d}ള എൻഡ\u{d4d} ബട\u{d4d}ടൺ",
                "ബട\u{d4d}ടൺ",
                "ബ\u{d3e}ക\u{d4d}ക\u{d4d}",
            ],
        },
        #[cfg(feature = "mn")]
        crate::Annotation {
            lang: "mn",
            tts: Some("төгсөх сум"),
            keywords: &["сум", "төгсөх"],
        },
        #[cfg(feature = "mr")]
        crate::Annotation {
            lang: "mr",
            tts: Some("समाप\u{94d}ती बाण"),
            keywords: &["बाण", "समाप\u{94d}ती"],
        },
        #[cfg(feature = "ms")]
        crate::Annotation {
            lang: "ms",
            tts: Some("anak panah ‘END’"),
            keywords: &["anak panah", "anak panah ‘END’", "tamat"],
        },
        #[cfg(feature = "mt")]
        crate::Annotation {
            lang: "mt",
            tts: Some("vleġġa END"),
            keywords: &["tmiem", "vleġġa", "vleġġa END"],
        },
        #[cfg(feature = "my")]
        crate::Annotation {
            lang: "my",
            tts: Some("အဆ\u{102f}\u{1036}းသတ\u{103a}ပြ မြား"),
            keywords: &[
                "မြား",
                "အဆ\u{102f}\u{1036}း",
                "အဆ\u{102f}\u{1036}းသတ\u{103a}ပြ မြား",
            ],
        },
        #[cfg(feature = "nb")]
        crate::Annotation {
            lang: "nb",
            tts: Some("END-pil"),
            keywords: &[
                "END-pil",
                "pil",
                "pil til venstre",
                "slutt",
                "slutt med pil",
                "sluttpil",
            ],
        },
        #[cfg(feature = "ne")]
        crate::Annotation {
            lang: "ne",
            tts: Some("सकिएको स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण"),
            keywords: &[
                "अन\u{94d}तिम",
                "वाण",
                "सकिएको स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण",
            ],
        },
        #[cfg(feature = "nl")]
        crate::Annotation {
            lang: "nl",
            tts: Some("END-pijl"),
            keywords: &["END-pijl", "einde", "pijl"],
        },
        #[cfg(feature = "nn")]
        crate::Annotation {
            lang: "nn",
            tts: Some("END-pil"),
            keywords: &[
                "END-pil",
                "pil",
                "pil til venstre",
                "slutt",
                "slutt med pil",
                "sluttpil",
            ],
        },
        #[cfg(feature = "or")]
        crate::Annotation {
            lang: "or",
            tts: Some("ଶେଷ ତୀର"),
            keywords: &["ତୀର", "ଶେଷ"],
        },
        #[cfg(feature = "pa")]
        crate::Annotation {
            lang: "pa",
            tts: Some("ਸਮਾਪਤੀ ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ"),
            keywords: &[
                "ਖ\u{a71}ਬਾ ਤੀਰ",
                "ਖ\u{a71}ਬ\u{a47} ਤੀਰ ਨਾਲ ਸਮਾਪਤ",
                "ਤੀਰ",
                "ਸਮਾਪਤ",
                "ਸਮਾਪਤੀ ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ",
            ],
        },
        #[cfg(feature = "pa_Arab")]
        crate::Annotation {
            lang: "pa_Arab",
            tts: Some("آخر دا تیر"),
            keywords: &["آخر دا تیر", "اختتام", "تیر"],
        },
        #[cfg(feature = "pcm")]
        crate::Annotation {
            lang: "pcm",
            tts: Some("END Áro"),
            keywords: &["Bak", "END Áro", "Áro"],
        },
        #[cfg(feature = "pl")]
        crate::Annotation {
            lang: "pl",
            tts: Some("strzałka z napisem END"),
            keywords: &["koniec", "strzałka", "strzałka z napisem END"],
        },
        #[cfg(feature = "ps")]
        crate::Annotation {
            lang: "ps",
            tts: Some("د پای غشی"),
            keywords: &["د پای غشی", "غشی", "پای"],
        },
        #[cfg(feature = "pt")]
        crate::Annotation {
            lang: "pt",
            tts: Some("seta \"END\""),
            keywords: &["fim", "seta", "seta \"END\"", "seta para a esquerda"],
        },
        #[cfg(feature = "pt_PT")]
        crate::Annotation {
            lang: "pt_PT",
            tts: Some("seta END"),
            keywords: &["para o fim", "seta", "seta END"],
        },
        #[cfg(feature = "qu")]
        crate::Annotation {
            lang: "qu",
            tts: Some("TUKUY wachʼi"),
            keywords: &["TUKUY wachʼi"],
        },
        #[cfg(feature = "ro")]
        crate::Annotation {
            lang: "ro",
            tts: Some("săgeată cu textul END"),
            keywords: &["final", "săgeată", "săgeată cu textul END"],
        },
        // "root" carries the CLDR short-name code (E10-726) used when no
        // locale-specific annotation is available.
        #[cfg(feature = "root")]
        crate::Annotation {
            lang: "root",
            tts: Some("E10-726"),
            keywords: &["E10-726"],
        },
        #[cfg(feature = "ru")]
        crate::Annotation {
            lang: "ru",
            tts: Some("стрелка «конец»"),
            keywords: &["конец", "стрелка «конец»", "стрелка конец"],
        },
        #[cfg(feature = "rw")]
        crate::Annotation {
            lang: "rw",
            tts: Some("akambi ka END"),
            keywords: &["ahahera", "akambi ka END", "umwambi"],
        },
        #[cfg(feature = "sd")]
        crate::Annotation {
            lang: "sd",
            tts: Some("پڇاڙي وارو تير"),
            keywords: &["تير", "پڇاڙي", "پڇاڙي وارو تير"],
        },
        #[cfg(feature = "si")]
        crate::Annotation {
            lang: "si",
            tts: Some("අවස\u{dcf}නයට ඊතලය"),
            keywords: &["අවස\u{dcf}නයට", "ඊතලය"],
        },
        #[cfg(feature = "sk")]
        crate::Annotation {
            lang: "sk",
            tts: Some("šípka END"),
            keywords: &["koniec", "šípka", "šípka END"],
        },
        #[cfg(feature = "sl")]
        crate::Annotation {
            lang: "sl",
            tts: Some("puščica konec"),
            keywords: &["konec", "puščica"],
        },
        #[cfg(feature = "so")]
        crate::Annotation {
            lang: "so",
            tts: Some("Fallaarta DHAMMAADKA"),
            keywords: &["Fallaarta DHAMMAADKA", "dhammaad", "fallaar"],
        },
        #[cfg(feature = "sq")]
        crate::Annotation {
            lang: "sq",
            tts: Some("shigjeta e fundit"),
            keywords: &["fund", "shigjeta e fundit", "shigjetë"],
        },
        #[cfg(feature = "sr")]
        crate::Annotation {
            lang: "sr",
            tts: Some("стрелица за крај"),
            keywords: &["крaj", "стрeлицa", "стрелица за крај"],
        },
        #[cfg(feature = "sr_Cyrl_BA")]
        crate::Annotation {
            lang: "sr_Cyrl_BA",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "sr_Latn")]
        crate::Annotation {
            lang: "sr_Latn",
            tts: Some("strelica za kraj"),
            keywords: &["kraj", "strelica", "strelica za kraj"],
        },
        #[cfg(feature = "sv")]
        crate::Annotation {
            lang: "sv",
            tts: Some("End med högerpil"),
            keywords: &["End med högerpil", "framåt", "högerpil"],
        },
        #[cfg(feature = "sw")]
        crate::Annotation {
            lang: "sw",
            tts: Some("mshale wa mwisho"),
            keywords: &["mshale", "mshale wa mwisho", "mwisho"],
        },
        #[cfg(feature = "sw_KE")]
        crate::Annotation {
            lang: "sw_KE",
            tts: Some("↑↑↑"),
            keywords: &["↑↑↑"],
        },
        #[cfg(feature = "ta")]
        crate::Annotation {
            lang: "ta",
            tts: Some("முடிவு"),
            keywords: &["அம\u{bcd}புக\u{bcd}குறி", "முடிவு"],
        },
        #[cfg(feature = "te")]
        crate::Annotation {
            lang: "te",
            tts: Some("ముగ\u{c3f}స\u{c3f}ంద\u{c3f} బ\u{c3e}ణం"),
            keywords: &[
                "బ\u{c3e}ణం",
                "ముగ\u{c3f}ంపు",
                "ముగ\u{c3f}స\u{c3f}ంద\u{c3f} బ\u{c3e}ణం",
            ],
        },
        #[cfg(feature = "tg")]
        crate::Annotation {
            lang: "tg",
            tts: Some("тири \"охир\""),
            keywords: &["охир", "тир", "тири \"охир\""],
        },
        #[cfg(feature = "th")]
        crate::Annotation {
            lang: "th",
            tts: Some("ส\u{e34}\u{e49}นส\u{e38}ด"),
            keywords: &[
                "ล\u{e39}กศร",
                "ล\u{e39}กศรส\u{e34}\u{e49}นส\u{e38}ด",
                "ส\u{e34}\u{e49}นส\u{e38}ด",
            ],
        },
        #[cfg(feature = "ti")]
        crate::Annotation {
            lang: "ti",
            tts: Some("መወዳእታ ናይ ምልክት"),
            keywords: &["መወዳእታ", "መወዳእታ ናይ ምልክት", "ምልክት"],
        },
        #[cfg(feature = "tk")]
        crate::Annotation {
            lang: "tk",
            tts: Some("END oky"),
            keywords: &["END oky", "ok", "soňy"],
        },
        #[cfg(feature = "to")]
        crate::Annotation {
            lang: "to",
            tts: Some("ngahau NGATA"),
            keywords: &["NGATA", "ngahau"],
        },
        #[cfg(feature = "tr")]
        crate::Annotation {
            lang: "tr",
            tts: Some("bitiş oku"),
            keywords: &["bitiş oku", "ok", "sol bitiş oku", "sol ok"],
        },
        #[cfg(feature = "ug")]
        crate::Annotation {
            lang: "ug",
            tts: Some("ئاياغ كۆرسەتكۈچ"),
            keywords: &["ئاياغ", "كۆرسەتكۈچ"],
        },
        #[cfg(feature = "uk")]
        crate::Annotation {
            lang: "uk",
            tts: Some("стрілка [END]"),
            keywords: &["кінець", "стрілка", "стрілка [END]"],
        },
        #[cfg(feature = "ur")]
        crate::Annotation {
            lang: "ur",
            tts: Some("اختتام تیر"),
            keywords: &["اختتام", "تیر"],
        },
        #[cfg(feature = "uz")]
        crate::Annotation {
            lang: "uz",
            tts: Some("berk chizig‘i"),
            keywords: &["berk", "berk chizig‘i", "chiziq"],
        },
        #[cfg(feature = "vi")]
        crate::Annotation {
            lang: "vi",
            tts: Some("mũi tên END"),
            keywords: &["end", "mũi tên", "mũi tên END"],
        },
        #[cfg(feature = "wo")]
        crate::Annotation {
            lang: "wo",
            tts: Some("fettu JEEX"),
            keywords: &["fett", "fettu JEEX", "jeex"],
        },
        #[cfg(feature = "xh")]
        crate::Annotation {
            lang: "xh",
            tts: Some("utolo oluthi UKUPHELA"),
            keywords: &["ukuphela", "utolo", "utolo oluthi UKUPHELA"],
        },
        #[cfg(feature = "yo")]
        crate::Annotation {
            lang: "yo",
            tts: Some("Ọfa IPARI"),
            keywords: &["ìparí", "Ọfa IPARI", "ọfà"],
        },
        #[cfg(feature = "yue")]
        crate::Annotation {
            lang: "yue",
            tts: Some("結束箭咀"),
            keywords: &["箭咀", "結束", "結束箭咀"],
        },
        #[cfg(feature = "yue_Hans")]
        crate::Annotation {
            lang: "yue_Hans",
            tts: Some("结束箭咀"),
            keywords: &["箭咀", "结束", "结束箭咀"],
        },
        #[cfg(feature = "zh")]
        crate::Annotation {
            lang: "zh",
            tts: Some("结束箭头"),
            keywords: &["箭头", "结束", "结束箭头"],
        },
        #[cfg(feature = "zh_Hant")]
        crate::Annotation {
            lang: "zh_Hant",
            tts: Some("結束"),
            keywords: &["結束"],
        },
        #[cfg(feature = "zh_Hant_HK")]
        crate::Annotation {
            lang: "zh_Hant_HK",
            tts: Some("結束箭嘴"),
            keywords: &["結束", "結束箭嘴"],
        },
        #[cfg(feature = "zu")]
        crate::Annotation {
            lang: "zu",
            tts: Some("umcibisholo wesiphetho"),
            keywords: &["isiphetho", "umcibisholo", "umcibisholo wesiphetho"],
        },
    ],
};
#[doc = "🔛"]
pub const ON_ARROW: crate::Emoji = crate::Emoji {
glyph: "🔛",
codepoint: "1F51B",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "ON! arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("aan!-pyl"),
keywords: &["aan", "aan!-pyl", "merk", "pyl"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("በርቷል! ቀስት"),
keywords: &["ምልክት", "ቀስት", "በርቷል! ቀስት", "አብራ"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهم تشغيل"),
keywords: &[
"تشغيل مع علامة تعجب وسهم",
"تعجب",
"سهم",
"سهم تشغيل",
"علامة تعجب",
],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some("চ\u{9be}ল\u{9c1}! ক\u{9be}\u{981}ড\u{9bc}"),
keywords: &[
"ক\u{9be}\u{981}ড\u{9bc}",
"চ\u{9be}ল\u{9c1}",
"চ\u{9be}ল\u{9c1}! ক\u{9be}\u{981}ড\u{9bc}",
"চিহ\u{9cd}ন",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("aktivdir! oxu"),
keywords: &["aktivdir! oxu", "işarə", "ox", "üstündə"],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка «уключана!»"),
keywords: &["актыўна", "стрэлка", "стрэлка «уключана!»", "уключана"],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("стрелка с „on!“"),
keywords: &["on", "стрелка", "стрелка с „on!“"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("অন! তীর"),
keywords: &["অন! তীর", "চ\u{9be}ল\u{9c1}", "চিহ\u{9cd}ন", "তীর"],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica za uključeno"),
keywords: &["oznaka", "strelica", "strelica za uključeno", "uključeno"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa amb la paraula ON!"),
keywords: &["ON!", "fletxa", "fletxa amb la paraula ON!", "paraula"],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎧᏂᎩᏓ! ᎦᏝᏗ"),
keywords: &["ᎦᏝᏗ", "ᎧᏂᎩᏓ! ᎦᏝᏗ", "ᎪᏪᎸ", "ᏄᏍᏛᎢ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipky s nápisem ON!"),
keywords: &["kupředu", "značka", "šipka", "šipky s nápisem ON!"],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth ON!"),
keywords: &["marc", "saeth", "saeth ON!", "ymlaen"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("ON!-dobbeltpil"),
keywords: &["ON!", "ON!-dobbeltpil", "ON!-pil", "dobbeltpil"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("ON!-Pfeil"),
keywords: &["ON!-Pfeil", "Pfeil", "rechts und links"],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("βέλος ON!"),
keywords: &["βέλος", "βέλος ON!", "ενεργοποίηση", "σήμα"],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("ON! arrow"),
keywords: &["ON! arrow", "arrow", "mark", "on"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha ON!"),
keywords: &[
"flecha",
"flecha ON!",
"flecha de doble punta con la palabra \"on\" encima",
"on",
"señal",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &["flecha", "flecha ON!", "marca"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("nool ON!"),
keywords: &["märk", "nool", "nool ON!"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("“on” testua duen gezia"),
keywords: &["gezi", "markatu", "on", "“on” testua duen gezia"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان روشن"),
keywords: &["روشن", "علامت", "پیکان"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("ON!-nuoli"),
keywords: &["ON!-nuoli", "nuoli", "päällä"],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("on! arrow"),
keywords: &["ON!", "arrow", "naka-on", "on! arrow"],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("ON! pílur"),
keywords: &["ON! pílur", "on", "pílur"],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche Activé"),
keywords: &["flèche", "flèche Activé"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("étiquette « ON! » et flèche"),
keywords: &["en marche", "flèche", "on", "étiquette « ON! » et flèche"],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead AR SIÚL!"),
keywords: &["ar siúl", "marc", "on", "saighead", "saighead AR SIÚL!"],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead “ON”"),
keywords: &["air", "comharra", "saighead", "saighead “ON”"],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha coa mensaxe \"en marcha!\" en inglés"),
keywords: &[
"acender",
"acendido",
"frecha",
"frecha coa mensaxe \"en marcha!\" en inglés",
"on!",
],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("ચાલ\u{ac1}! તીર"),
keywords: &[
"ઉદ\u{acd}ગારવાચક",
"ઉદ\u{acd}ગારવાચક ચિહ\u{acd}ન",
"ઉદ\u{acd}ગારવાચક ચિહ\u{acd}ન અન\u{ac7} તીર સાથ\u{ac7} ચાલ\u{ac1}",
"ચાલ\u{ac1} તીર",
"ચાલ\u{ac1}! તીર",
"તીર",
],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("Kibiyar A KUNNE!"),
keywords: &["Kibiyar A KUNNE!", "a kunne", "kibiya", "maki"],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ פועל!"),
keywords: &["חץ", "חץ פועל!", "פועל"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("on! तीर"),
keywords: &["ON! तीर", "on! तीर", "चाल\u{942}", "चिह\u{94d}न", "तीर"],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("strelica s natpisom \"on!\""),
keywords: &[
"oznaka",
"strelica",
"strelica s natpisom \"on!\"",
"uključeno",
],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("bekapcsolva nyíl"),
keywords: &["bekapcsolva", "jelzés", "nyíl"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("միացված է գրությամբ սլաք"),
keywords: &["միացված է", "միացված է գրությամբ սլաք", "նշան", "սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah ON!"),
keywords: &["aktif", "panah", "tanda", "tanda panah ON!"],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("NA! ube"),
keywords: &["NA! ube", "akara", "na", "ube"],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("kveikt-ör"),
keywords: &[
"Kveikt með upphrópunarmerki og ör",
"kveikt-ör",
"upphrópun",
"upphrópunarmerki",
"ör",
],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia ON"),
keywords: &[
"ON con freccia e punto esclamativo",
"freccia",
"freccia ON",
"punto esclamativo",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("ON矢印"),
keywords: &["ON", "ON矢印", "オン", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah ON!"),
keywords: &["on", "panah", "panah ON!", "tenger"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი on!"),
keywords: &["ისარი", "ისარი on!", "ნიშანი", "ჩართვა"],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("urmid"),
keywords: &["urmid"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("«қосулы» көрсеткісі"),
keywords: &["«қосулы» көрсеткісі", "белгі", "көрсеткі", "қосулы"],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("ON-pil"),
keywords: &["ON med udråbstegn", "ON-pil", "på"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some(
"ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេងទៅស\u{17d2}តា\u{17c6}មានអក\u{17d2}សរ ON នៅព\u{17b8}ក\u{17d2}រោម",
),
keywords: &[
"ON!",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញទៅឆ\u{17d2}វេងទៅស\u{17d2}តា\u{17c6}មានអក\u{17d2}សរ ON នៅព\u{17b8}ក\u{17d2}រោម",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಆನ\u{ccd}! ಬಾಣದ ಗುರುತು"),
keywords: &[
"ಆನ\u{ccd} ಬಾಣದ ಗುರುತು",
"ಆನ\u{ccd}! ಬಾಣದ ಗುರುತು",
"ಆಶ\u{ccd}ಚರ\u{ccd}ಯ ಗುರುತು",
"ಆಶ\u{ccd}ಚರ\u{ccd}ಯ ಸ\u{cc2}ಚಕ",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("켜짐"),
keywords: &["양쪽 방향 화살표", "켜짐", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("चाल\u{942}! बाण"),
keywords: &["ख\u{942}ण", "चाल\u{942}", "चाल\u{942}! बाण", "बाण"],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("күйүк! жебеси"),
keywords: &["белги", "жебе", "күйүк! жебеси"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("UN!-Feil"),
keywords: &["Feil", "Markéierung", "UN!-Feil", "un"],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນ ເປ\u{eb5}ດ!"),
keywords: &["ລ\u{eb9}ກສອນ", "ລ\u{eb9}ກສອນ ເປ\u{eb5}ດ!", "ເປ\u{eb5}ດ"],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė su užrašu „On!“"),
keywords: &["rodyklė", "rodyklė su užrašu „On!“", "žymė"],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("bultiņa ar uzrakstu On!"),
keywords: &["bultiņa", "bultiņa ar uzrakstu On!", "ieslēgts"],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere KĀ!"),
keywords: &["kā", "pere", "pere KĀ!", "tohu"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("стрелка за вклучено"),
keywords: &["вклучено", "стрелка", "стрелка за вклучено"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some("ഓൺ ചെയ\u{d4d}യ\u{d41}ന\u{d4d}നതിന\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം"),
keywords: &[
"അടയ\u{d3e}ളം",
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ഇര\u{d41}ഭ\u{d3e}ഗത\u{d4d}തേക\u{d4d}ക\u{d41}മ\u{d41}ള\u{d4d}ള ആരോ ബട\u{d4d}ടണ\u{d41}കളിൽ ഓകെ ചിഹ\u{d4d}നം",
"ഓൺ ചെയ\u{d4d}യ\u{d41}ന\u{d4d}നതിന\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളം",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("дээр гэсэн сум"),
keywords: &["дээр", "дээр гэсэн сум", "сум"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("चाल\u{942}! बाण"),
keywords: &["चाल\u{942}", "चाल\u{942}! बाण", "चिन\u{94d}ह", "बाण"],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah ON!"),
keywords: &["anak panah", "anak panah ON!", "hidup", "tanda"],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa ON!"),
keywords: &["fuq", "marka", "vleġġa", "vleġġa ON!"],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ဖ\u{103d}င\u{1037}\u{103a}ထားခြင\u{103a}းပြ မြား သင\u{103a}\u{1039}ကေတ"),
keywords: &[
"ဖ\u{103d}င\u{1037}\u{103a}ထားခြင\u{103a}းပြ မြား သင\u{103a}\u{1039}ကေတ",
"ဖ\u{103d}င\u{1037}\u{103a}ထားပြ\u{102e}း ပြ သင\u{103a}\u{1039}ကေတ",
"ဖ\u{103d}င\u{1037}\u{103a}ထားပြ\u{102e}းပြ မြား သင\u{103a}\u{1039}ကေတ",
"မြား",
"အမ\u{103e}တ\u{103a}အသား",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("ON!-pil"),
keywords: &["ON", "ON!-pil", "PÅ", "pil", "utrop", "utropstegn"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("चलिरह\u{947}को! स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण"),
keywords: &[
"चलिरह\u{947}को! स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण",
"चिन\u{94d}ह",
"मा",
"वाण",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("ON!-pijl"),
keywords: &["ON!-pijl", "aan", "pijl", "teken"],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("ON!-pil"),
keywords: &["ON", "ON!-pil", "PÅ", "pil", "utrop", "utropsteikn"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ଅନ\u{b4d}\u{200c}! ତୀର"),
keywords: &[
"ଅନ\u{b4d}\u{200c}",
"ଅନ\u{b4d}\u{200c}! ତୀର",
"ଚ\u{b3f}ହ\u{b4d}ନ",
"ତୀର",
],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਦ\u{a4b} ਪਾਸ\u{a47} ਚਾਲ\u{a42} ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ"),
keywords: &[
"ON ਤੀਰ",
"ON ਹ\u{a48}ਰਾਨੀਵਾਚਕ ਨਿਸ\u{a3c}ਾਨ ਨਾਲ ਅਤ\u{a47} ਤੀਰ",
"ਤੀਰ",
"ਦ\u{a4b} ਪਾਸ\u{a47} ਚਾਲ\u{a42} ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ",
"ਹ\u{a48}ਰਾਨੀਵਾਚਕ",
"ਹ\u{a48}ਰਾਨੀਵਾਚਕ ਨਿਸ\u{a3c}ਾਨ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("چلان دا تیر"),
keywords: &["تیر", "نشان لانا", "چلان دا تیر", "چلانا"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("ON! Áro"),
keywords: &["Mak", "ON! Áro", "Áro", "Ọn"],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka z napisem ON!"),
keywords: &["strzałka", "strzałka z napisem ON!", "włączone"],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("چالو! غشی"),
keywords: &["غشی", "نښه", "چالو", "چالو! غشی"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta \"ON!\""),
keywords: &["ON!", "marca", "on", "seta", "seta \"ON!\""],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("seta ON!"),
keywords: &["ligado", "seta", "seta ON!"],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("¡KAWSARICHIY! wachʼi"),
keywords: &["¡KAWSARICHIY! wachʼi"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată cu textul ON!"),
keywords: &["marcaj", "săgeată", "săgeată cu textul ON!"],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-727"),
keywords: &["E10-727"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка «активно»"),
keywords: &[
"активно",
"включено",
"стрелка «активно»",
"стрелка активно",
],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("akambi ka BIRAFUNGUYE!"),
keywords: &["akambi ka BIRAFUNGUYE!", "ikimenyetso", "kuri", "umwambi"],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("آن! تير"),
keywords: &["آن", "آن! تير", "تير", "نشان"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("ක\u{dca}\u{200d}ර\u{dd2}ය\u{dcf}ත\u{dca}මකය\u{dd2}! ඊතලය"),
keywords: &[
"ඊතලය",
"ක\u{dca}\u{200d}ර\u{dd2}ය\u{dcf}ත\u{dca}මක",
"ක\u{dca}\u{200d}ර\u{dd2}ය\u{dcf}ත\u{dca}මකය\u{dd2}! ඊතලය",
"ලක\u{dd4}ණ",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka ON!"),
keywords: &["zapnuté", "značka", "šípka", "šípka ON!"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica NAPREJ!"),
keywords: &["puščica", "puščica NAPREJ!", "vklop", "znak"],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("fallaarta DUSHA!"),
keywords: &["calaamad", "dusha", "fallaar", "fallaarta DUSHA!"],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjeta aktive!"),
keywords: &["aktiv", "aktive", "shenjë", "shigjeta aktive!", "shigjetë"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица за укључено"),
keywords: &["oзнaкa", "стрeлицa", "стрелица за укључено", "укључeнo"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica za uključeno"),
keywords: &["oznaka", "strelica", "strelica za uključeno", "uključeno"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("ON! med pil höger och vänster ovanför"),
keywords: &[
"ON! med pil höger och vänster ovanför",
"pil höger och vänster",
"utropstecken",
],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale wa hewani!"),
keywords: &["alama", "hewani", "mshale", "mshale wa hewani!"],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("ஆன\u{bcd}"),
keywords: &[
"அம\u{bcd}புக\u{bcd}குறி",
"ஆன\u{bcd}",
"ஆன\u{bcd}! அம\u{bcd}புக\u{bcd}குறி",
"குறி",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("ఇరువ\u{c48}పులు బ\u{c3e}ణం"),
keywords: &[
"ఇరువ\u{c48}పుల\u{c3e} ప\u{c4d}రవ\u{c47}శం ఉంద\u{c3f}",
"ఇరువ\u{c48}పులు బ\u{c3e}ణం",
"ఎల\u{c3e}గ\u{c48}న\u{c3e} వ\u{c46}ళ\u{c4d}లవచ\u{c4d}చు",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири \"фаъол!\""),
keywords: &["нишона", "тир", "тири \"фаъол!\"", "фаъол"],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("เป\u{e34}ด"),
keywords: &["ล\u{e39}กศร", "เป\u{e34}ด"],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ኣብ ልዕሊ! ምልክት"),
keywords: &["መፈለጥታ", "ምልክት", "ኣብ ልዕሊ", "ኣብ ልዕሊ! ምልክት"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("ON! oky"),
keywords: &["ON! oky", "açyk", "bellik", "ok"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau FUNGA"),
keywords: &["FUNGA", "ngahau"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("açık oku"),
keywords: &[
"açık oku",
"ok",
"ünlem",
"ünlem işareti",
"ünlem işareti ve okla AÇIK",
],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("ئوچۇق! كۆرسەتكۈچ"),
keywords: &["ئوچۇق", "ئوچۇق! كۆرسەتكۈچ", "بەلگە", "كۆرسەتكۈچ"],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка [ON!]"),
keywords: &["ввімкнено", "значок", "стрілка", "стрілка [ON!]"],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("آن! تیر"),
keywords: &["آن", "آن! تیر", "تیر"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("efirda chizig‘i"),
keywords: &["belgi", "chiziq", "efirda", "efirda chizig‘i"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên ON!"),
keywords: &["dấu", "mũi tên", "mũi tên ON!", "on"],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fettu ci TAAL"),
keywords: &["fett", "fettu ci TAAL", "mark", "taal"],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo oluthi iLayitile!"),
keywords: &["ilayitile", "uphawu", "utolo", "utolo oluthi iLayitile!"],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("Ọfa LORI!"),
keywords: &["lórí", "o\u{329}fà", "àmì", "Ọfa LORI!"],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("「在」箭咀"),
keywords: &["「在」箭咀", "在", "標誌", "箭咀"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("“在”箭咀"),
keywords: &["“在”箭咀", "在", "标志", "箭咀"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("ON! 箭头"),
keywords: &["ON", "ON! 箭头", "开始", "标识", "箭头"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("ON"),
keywords: &["ON"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("「On!」箭嘴"),
keywords: &["「On!」", "「On!」箭嘴"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("kuvuliwe! umcibisholo"),
keywords: &["kuvuliwe", "kuvuliwe! umcibisholo", "umcibisholo", "uphawu"],
},
],
};
#[doc = "🔜"]
pub const SOON_ARROW: crate::Emoji = crate::Emoji {
glyph: "🔜",
codepoint: "1F51C",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "SOON arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("binnekortpyl"),
keywords: &["binnekort", "binnekortpyl", "gou", "pyl"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("በቅርቡ ይመጣል ቀስት"),
keywords: &["ቀስት", "በቅርቡ", "በቅርቡ ይመጣል ቀስት"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("سهم علامة قريب\u{64b}ا"),
keywords: &[
"سهم",
"سهم علامة قريب\u{64b}ا",
"علامة قريب\u{64b}ا",
"قريب\u{64b}ا",
"قريب\u{64b}ا مع سهم أيمن",
],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some("শীঘ\u{9cd}ৰে ক\u{9be}\u{981}ড\u{9bc}"),
keywords: &[
"ক\u{9be}\u{981}ড\u{9bc}",
"শীঘ\u{9cd}ৰে ক\u{9be}\u{981}ড\u{9bc}",
"সোনক\u{9be}লে",
],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("tezliklə oxu"),
keywords: &["ox", "tezliklə", "tezliklə oxu"],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка «неўзабаве»"),
keywords: &["неўзабаве", "стрэлка", "стрэлка «неўзабаве»"],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("стрелка със soon"),
keywords: &["soon", "стрелка", "стрелка със soon"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("শীঘ\u{9cd}র তীর"),
keywords: &["তীর", "শীঘ\u{9cd}র তীর", "শীঘ\u{9cd}রই"],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica za uskoro"),
keywords: &["strelica", "strelica za uskoro", "uskoro"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa amb la paraula Soon"),
keywords: &["Soon", "fletxa", "fletxa amb la paraula Soon", "paraula"],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᏞᎩᏭ ᎦᏝᏗ"),
keywords: &["ᎦᏝᏗ", "ᏞᎩᏭ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipka s nápisem SOON"),
keywords: &["brzy", "šipka", "šipka s nápisem SOON"],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth SOON"),
keywords: &["cyn bo hir", "saeth", "saeth SOON"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("SOON-pil"),
keywords: &["SOON-pil", "snart med højrepil", "snart-tegn"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("SOON-Pfeil"),
keywords: &["Pfeil", "SOON-Pfeil", "rechts"],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("βέλος SOON"),
keywords: &["βέλος", "βέλος SOON", "σύντομα"],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("SOON arrow"),
keywords: &["SOON arrow", "arrow", "soon"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha SOON"),
keywords: &[
"flecha",
"flecha SOON",
"soon",
"soon con flecha a la derecha",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &["flecha", "flecha SOON", "pronto"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("nool SOON"),
keywords: &["nool", "nool SOON", "varsti"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("“soon” testua duen gezia"),
keywords: &["gezi", "soon", "“soon” testua duen gezia"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان به\u{200c}زودی"),
keywords: &["به\u{200c}زودی", "پیکان"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("SOON-nuoli"),
keywords: &["SOON-nuoli", "nuoli", "tulossa"],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("soon arrow"),
keywords: &["SOON", "arrow", "malapit na", "soon arrow"],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("SOON pílur"),
keywords: &["SOON pílur", "pílur", "soon"],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche Bientôt"),
keywords: &["flèche", "flèche Bientôt"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("étiquette « SOON » et flèche"),
keywords: &["bientôt", "flèche", "soon", "étiquette « SOON » et flèche"],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead GO LUATH"),
keywords: &["go luath agus saighead ar dheis", "saighead GO LUATH"],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead “SOON”"),
keywords: &["a dh’aithghearr", "saighead", "saighead “SOON”"],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha coa mensaxe \"pronto\" en inglés"),
keywords: &[
"frecha",
"frecha coa mensaxe \"pronto\" en inglés",
"pronto",
"soon",
],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("જલ\u{acd}દીન\u{ac1}\u{a82} તીર"),
keywords: &[
"જમણા\u{a82} તીર સાથ\u{ac7} જલ\u{acd}દી",
"જલ\u{acd}દી",
"જલ\u{acd}દીન\u{ac1}\u{a82} ચિહ\u{acd}ન",
"જલ\u{acd}દીન\u{ac1}\u{a82} તીર",
"તીર",
],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("Kibiyar BA DA DAƊEWA BA"),
keywords: &["Kibiyar BA DA DAƊEWA BA", "ba da daɗewa ba", "kibiya"],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ בקרוב"),
keywords: &["בקרוב", "חץ"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("SOON तीर"),
keywords: &["SOON तीर", "जल\u{94d}दी", "तीर"],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("strelica s natpisom \"soon\""),
keywords: &["strelica", "strelica s natpisom \"soon\"", "uskoro"],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("hamarosan nyíl"),
keywords: &["hamarosan", "jön", "nyíl"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("շուտով գրությամբ սլաք"),
keywords: &["շուտով", "շուտով գրությամբ սլաք", "սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah SOON"),
keywords: &["panah", "segera", "tanda panah SOON"],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("UBE mgbe adịghị anya"),
keywords: &["UBE mgbe adịghị anya", "mgbe adịghị anya", "ube"],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("bráðum-ör"),
keywords: &[
"bráðum",
"bráðum með vinstri ör",
"bráðum-merki",
"bráðum-ör",
"ör",
],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia SOON"),
keywords: &[
"freccia",
"freccia SOON",
"presto",
"simbolo soon",
"soon con freccia verso destra",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("SOON矢印"),
keywords: &["SOON", "SOON矢印", "すぐ", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah SOON"),
keywords: &["gage", "panah", "panah SOON", "soon"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი soon"),
keywords: &["ისარი", "ისარი soon", "მალე"],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("Aneccab Qrib"),
keywords: &["Aneccab Qrib"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("«жақында» көрсеткісі"),
keywords: &["«жақында» көрсеткісі", "жақында", "көрсеткі"],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("snart"),
keywords: &["snart", "snart med højrepil", "snart-tegn"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some(
"ព\u{17d2}រ\u{17bd}ញទៅស\u{17d2}តា\u{17c6}មានអក\u{17d2}សរ Soon នៅព\u{17b8}ក\u{17d2}រោម",
),
keywords: &[
"SOON",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញទៅស\u{17d2}តា\u{17c6}មានអក\u{17d2}សរ Soon នៅព\u{17b8}ក\u{17d2}រោម",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ತಕ\u{ccd}ಷಣ ಚ\u{cbf}ಹ\u{ccd}ನ\u{cc6}"),
keywords: &[
"ತಕ\u{ccd}ಷಣ",
"ತಕ\u{ccd}ಷಣ ಚ\u{cbf}ಹ\u{ccd}ನ\u{cc6}",
"ಬಲ ಬಾಣದ ಗುರುತ\u{cbf}ನೊಂದ\u{cbf}ಗ\u{cc6} ತಕ\u{ccd}ಷಣ",
"ಬಾಣ",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("곧"),
keywords: &[
"곧",
"곧 진행된다는 신호",
"곧 진행된다는 표시",
"곧 진행된다는 화살표",
"화살표",
],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("रोखड\u{947}\u{902}च बाण"),
keywords: &["बाण", "रोखड\u{947}\u{902}च"],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("жакында жебеси"),
keywords: &["жакында", "жакында жебеси", "жебе"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("GESCHWËNN-Feil"),
keywords: &["Feil", "GESCHWËNN-Feil", "geschwënn"],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນ ໄວໆນ\u{eb5}\u{ec9}"),
keywords: &["ລ\u{eb9}ກສອນ", "ໄວໆນ\u{eb5}\u{ec9}"],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė su užrašu „Soon“"),
keywords: &["greitai", "rodyklė", "rodyklė su užrašu „Soon“"],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("bultiņa ar uzrakstu Soon"),
keywords: &["bultiņa", "bultiņa ar uzrakstu Soon", "drīz"],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere ĀKUANEI"),
keywords: &["pere", "pere ĀKUANEI", "ākuanei"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("стрелка за наскоро"),
keywords: &["наскоро", "стрелка", "стрелка за наскоро"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some(
"ഉടൻ തന\u{d4d}നെ എന\u{d4d}ന\u{d4d} അർത\u{d4d}ഥമ\u{d3e}ക\u{d4d}ക\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"ഉടൻ തന\u{d4d}നെ എന\u{d4d}ന\u{d4d} അർത\u{d4d}ഥമ\u{d3e}ക\u{d4d}ക\u{d41}ന\u{d4d}ന അമ\u{d4d}പടയ\u{d3e}ളം",
"വലത\u{d4d}തോട\u{d4d}ട\u{d41}ള\u{d4d}ള ആരോയ\u{d4d}ക\u{d4d}കൊപ\u{d4d}പം ഉടനടി",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("удахгүй гэсэн сум"),
keywords: &["сум", "удахгүй", "удахгүй гэсэн сум"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("लवकरच बाण"),
keywords: &["बाण", "लवकरच"],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah ‘SOON’"),
keywords: &["anak panah", "anak panah ‘SOON’", "tidak lama lagi"],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa SOON"),
keywords: &["mill-aktar fis", "vleġġa", "vleġġa SOON"],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("မကြာမ\u{102e} မြား"),
keywords: &[
"မကြာမ\u{102e} ပြ သင\u{103a}\u{1039}ကေတ",
"မကြာမ\u{102e} မြား",
"မကြာမ\u{102e}ပြ မြား သင\u{103a}\u{1039}ကေတ",
"မြား",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("SOON-pil"),
keywords: &["SOON-pil", "pil", "snart", "snart med pil"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("छिट\u{94d}ट\u{948} आउन\u{947} स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण"),
keywords: &[
"आउन\u{947}",
"छिट\u{94d}ट\u{948} आउन\u{947} स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण",
"वाण",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("SOON-pijl"),
keywords: &["SOON-pijl", "binnenkort", "pijl"],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("SOON-pil"),
keywords: &["SOON-pil", "pil", "snart", "snart med pil"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ଶୀଘ\u{b4d}ର ତୀର"),
keywords: &["ତୀର", "ଶୀଘ\u{b4d}ର"],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਜਲਦ ਹੀ ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ"),
keywords: &[
"ਜਲਦ ਹੀ ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ",
"ਜਲਦੀ",
"ਜਲਦੀ ਨਿਸ\u{a3c}ਾਨ",
"ਜਲਦੀ ਨਿਸ\u{a3c}ਾਨ ਸ\u{a71}ਜ\u{a47} ਤੀਰ ਨਾਲ",
"ਤੀਰ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("چھیتی دا تیر"),
keywords: &["تیر", "چ\u{650}ھیتی", "چھیتی دا تیر"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("SOON Áro"),
keywords: &["SOON Áro", "Soon", "Áro"],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka z napisem SOON"),
keywords: &["strzałka", "strzałka z napisem SOON", "wkrótce"],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("ژر غشی"),
keywords: &["غشی", "ژر"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta \"SOON\""),
keywords: &["seta", "seta \"SOON\"", "soon"],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("seta SOON"),
keywords: &["brevemente", "seta", "seta SOON"],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("QATIQ wachʼi"),
keywords: &["QATIQ wachʼi"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată cu textul SOON"),
keywords: &["curând", "săgeată", "săgeată cu textul SOON"],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-728"),
keywords: &["E10-728"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка «скоро»"),
keywords: &["скоро", "стрелка «скоро»", "стрелка скоро"],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("akambi ka SOON"),
keywords: &["akambi ka SOON", "umwambi", "vuba cyane"],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("جلدي تير"),
keywords: &["تير", "جلدي"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("ඉක\u{dca}මන\u{dca} ඊතලය"),
keywords: &[
"ඉක\u{dca}මන\u{dca} ඊතලය",
"ඉක\u{dca}මන\u{dd2}න\u{dca}",
"ඊතලය",
],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka SOON"),
keywords: &["čoskoro", "šípka", "šípka SOON"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica kmalu"),
keywords: &["kmalu", "puščica"],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("falaarta DHAKHSASHADA"),
keywords: &["dhakhsasho", "falaar", "falaarta DHAKHSASHADA"],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjeta shpejt"),
keywords: &["shigjeta shpejt", "shigjetë", "shpejt"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица за ускоро"),
keywords: &["стрeлицa", "стрелица за ускоро", "ускoрo"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica za uskoro"),
keywords: &["strelica", "strelica za uskoro", "uskoro"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("Soon med högerpil ovanför"),
keywords: &["Soon med högerpil ovanför", "högerpil", "pil"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale unaoashiria hivi karibuni"),
keywords: &[
"hivi karibuni",
"mshale",
"mshale unaoashiria hivi karibuni",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("ச\u{bc0}க\u{bcd}கிரம\u{bcd}"),
keywords: &[
"அம\u{bcd}புக\u{bcd}குறி",
"ச\u{bc0}க\u{bcd}கிரம\u{bcd}",
"விரைவில\u{bcd}",
],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("సమ\u{c40}ప\u{c3f}స\u{c4d}త\u{c4b}ంద\u{c3f} బ\u{c3e}ణం"),
keywords: &[
"త\u{c4d}వరల\u{c4b} ర\u{c3e}బ\u{c4b}తుంద\u{c3f}",
"దగ\u{c4d}గరల\u{c4b} ఉంద\u{c3f}",
"సమ\u{c40}ప\u{c3f}స\u{c4d}త\u{c4b}ంద\u{c3f} బ\u{c3e}ణం",
],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири \"ба қарибӣ\""),
keywords: &["ба қарибӣ", "тир", "тири \"ба қарибӣ\""],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ส\u{e31}ญล\u{e31}กษณ\u{e4c}เร\u{e47}วๆ น\u{e35}\u{e49}"),
keywords: &[
"ล\u{e39}กศร",
"ส\u{e31}ญล\u{e31}กษณ\u{e4c}เร\u{e47}วๆ น\u{e35}\u{e49}",
],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ቕልጡፍ ምልክት"),
keywords: &["ምልክት", "ቕልጡፍ"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("SOON oky"),
keywords: &["SOON oky", "bahym", "ok"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau OFI"),
keywords: &["OFI", "ngahau"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("yakında işareti"),
keywords: &["Yakında", "ok", "sağ okla Yakında", "yakında işareti"],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("يېقىندا دېگەن كۆرسەتكۈچ"),
keywords: &["كۆرسەتكۈچ", "يېقىندا", "يېقىندا دېگەن كۆرسەتكۈچ"],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка [SOON]"),
keywords: &["незабаром", "стрілка", "стрілка [SOON]"],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("جلد تیر"),
keywords: &["تیر", "جلد"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("tez orada chizig‘i"),
keywords: &["chiziq", "tez orada chizig‘i", "tezda"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên SOON"),
keywords: &["mũi tên", "mũi tên SOON", "soon"],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fettu CI KANAM"),
keywords: &["ci kanam", "fett", "fettu CI KANAM"],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo oluthi KUNGEKUDALA"),
keywords: &["kungekudala", "utolo", "utolo oluthi KUNGEKUDALA"],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("Ọfa LAIPẸ"),
keywords: &["láìpẹ\u{301}", "o\u{329}fà", "Ọfa LAIPẸ"],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("「不久」箭咀"),
keywords: &["「不久」箭咀", "不久", "箭咀"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("“不久”箭咀"),
keywords: &["“不久”箭咀", "不久", "箭咀"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("SOON 箭头"),
keywords: &["SOON 箭头", "箭头", "马上"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("馬上"),
keywords: &["馬上"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("soon 標誌的箭嘴"),
keywords: &["soon 標誌的箭嘴", "即將"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("umcibisholo wokushesha"),
keywords: &["ngokushesha", "umcibisholo", "umcibisholo wokushesha"],
},
],
};
#[doc = "🔝"]
pub const TOP_ARROW: crate::Emoji = crate::Emoji {
glyph: "🔝",
codepoint: "1F51D",
status: crate::Status::FullyQualified,
introduction_version: 0.6f32,
name: "TOP arrow",
group: "Symbols",
subgroup: "arrow",
is_variant: false,
variants: &[],
annotations: &[
#[cfg(feature = "af")]
crate::Annotation {
lang: "af",
tts: Some("toppyl"),
keywords: &["bopunt", "op", "pyl", "top", "toppyl"],
},
#[cfg(feature = "am")]
crate::Annotation {
lang: "am",
tts: Some("ከፍተኛ ጠቋሚ ቀስት"),
keywords: &["ላይ", "ቀስት", "ከፍተኛ", "ከፍተኛ ጠቋሚ ቀስት"],
},
#[cfg(feature = "ar")]
crate::Annotation {
lang: "ar",
tts: Some("السهم لأعلى"),
keywords: &["أعلى", "أعلى مع سهم لأعلى", "السهم لأعلى", "سهم لأعلى"],
},
#[cfg(feature = "as")]
crate::Annotation {
lang: "as",
tts: Some("শীৰ\u{9cd}ষ ক\u{9be}\u{981}ড\u{9bc}"),
keywords: &["ওপৰলৈ", "ক\u{9be}\u{981}ড\u{9bc}", "শীৰ\u{9cd}ষ"],
},
#[cfg(feature = "az")]
crate::Annotation {
lang: "az",
tts: Some("yuxarı yönəlmiş ox"),
keywords: &["ox", "yuxarı", "yuxarı yönəlmiş ox", "üst"],
},
#[cfg(feature = "be")]
crate::Annotation {
lang: "be",
tts: Some("стрэлка «верх»"),
keywords: &["дагары", "стрэлка", "стрэлка «верх»", "уверх", "угару"],
},
#[cfg(feature = "bg")]
crate::Annotation {
lang: "bg",
tts: Some("стрелка с top"),
keywords: &["top", "нагоре", "стрелка", "стрелка с top"],
},
#[cfg(feature = "bn")]
crate::Annotation {
lang: "bn",
tts: Some("শীর\u{9cd}ষের তীর"),
keywords: &[
"উর\u{9cd}দ\u{9cd}ধম\u{9c1}খী",
"তীর",
"শীর\u{9cd}ষ",
"শীর\u{9cd}ষের তীর",
],
},
#[cfg(feature = "bs")]
crate::Annotation {
lang: "bs",
tts: Some("strelica za gore"),
keywords: &["gore", "strelica", "strelica za gore"],
},
#[cfg(feature = "ca")]
crate::Annotation {
lang: "ca",
tts: Some("fletxa amb la paraula Top"),
keywords: &["Top", "fletxa", "fletxa amb la paraula Top", "paraula"],
},
#[cfg(feature = "chr")]
crate::Annotation {
lang: "chr",
tts: Some("ᎦᏚ ᎦᏝᏗ"),
keywords: &["ᎦᎸᎳᏗᏜ", "ᎦᏚ", "ᎦᏝᏗ"],
},
#[cfg(feature = "cs")]
crate::Annotation {
lang: "cs",
tts: Some("šipka s nápisem TOP"),
keywords: &[
"nahoru",
"neklopit",
"vzhůru",
"šipka",
"šipka s nápisem TOP",
],
},
#[cfg(feature = "cy")]
crate::Annotation {
lang: "cy",
tts: Some("saeth TOP"),
keywords: &["i fyny", "saeth", "saeth TOP", "top"],
},
#[cfg(feature = "da")]
crate::Annotation {
lang: "da",
tts: Some("TOP-pil"),
keywords: &["TOP-pil", "pil peger op", "toppil"],
},
#[cfg(feature = "de")]
crate::Annotation {
lang: "de",
tts: Some("TOP-Pfeil"),
keywords: &["Pfeil nach oben", "TOP-Pfeil"],
},
#[cfg(feature = "el")]
crate::Annotation {
lang: "el",
tts: Some("βέλος TOP"),
keywords: &["βέλος", "βέλος TOP", "επάνω", "κορυφή"],
},
#[cfg(feature = "en")]
crate::Annotation {
lang: "en",
tts: Some("TOP arrow"),
keywords: &["TOP arrow", "arrow", "top", "up"],
},
#[cfg(feature = "en_AU")]
crate::Annotation {
lang: "en_AU",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_CA")]
crate::Annotation {
lang: "en_CA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_GB")]
crate::Annotation {
lang: "en_GB",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "en_IN")]
crate::Annotation {
lang: "en_IN",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es")]
crate::Annotation {
lang: "es",
tts: Some("flecha TOP"),
keywords: &[
"arriba",
"flecha TOP",
"flecha hacia arriba",
"top",
"top con flecha hacia arriba",
],
},
#[cfg(feature = "es_419")]
crate::Annotation {
lang: "es_419",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_MX")]
crate::Annotation {
lang: "es_MX",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "es_US")]
crate::Annotation {
lang: "es_US",
tts: Some("↑↑↑"),
keywords: &["arriba", "flecha", "flecha TOP", "subir"],
},
#[cfg(feature = "et")]
crate::Annotation {
lang: "et",
tts: Some("nool TOP"),
keywords: &["nool", "nool TOP", "üles"],
},
#[cfg(feature = "eu")]
crate::Annotation {
lang: "eu",
tts: Some("“top” testua duen gezia"),
keywords: &["gezi", "gora", "top", "“top” testua duen gezia"],
},
#[cfg(feature = "fa")]
crate::Annotation {
lang: "fa",
tts: Some("پیکان رو"),
keywords: &["بالا", "برتر", "پیکان", "پیکان رو"],
},
#[cfg(feature = "fi")]
crate::Annotation {
lang: "fi",
tts: Some("TOP-nuoli"),
keywords: &["TOP-nuoli", "alkuun", "nuoli"],
},
#[cfg(feature = "fil")]
crate::Annotation {
lang: "fil",
tts: Some("top arrow"),
keywords: &["IBABAW", "arrow", "itaas", "top arrow", "tuktok"],
},
#[cfg(feature = "fo")]
crate::Annotation {
lang: "fo",
tts: Some("TOP pílur"),
keywords: &["TOP pílur", "pílur top"],
},
#[cfg(feature = "fr")]
crate::Annotation {
lang: "fr",
tts: Some("flèche En haut"),
keywords: &["flèche", "flèche En haut"],
},
#[cfg(feature = "fr_CA")]
crate::Annotation {
lang: "fr_CA",
tts: Some("étiquette « TOP » et flèche"),
keywords: &[
"dessus",
"flèche",
"haut",
"top",
"étiquette « TOP » et flèche",
],
},
#[cfg(feature = "ga")]
crate::Annotation {
lang: "ga",
tts: Some("saighead BHAIRR"),
keywords: &["barr agus saighead thuas", "saighead BHAIRR"],
},
#[cfg(feature = "gd")]
crate::Annotation {
lang: "gd",
tts: Some("saighead “TOP”"),
keywords: &[
"barr",
"ceann",
"mullach",
"saighead",
"saighead “TOP”",
"suas",
],
},
#[cfg(feature = "gl")]
crate::Annotation {
lang: "gl",
tts: Some("frecha coa mensaxe \"destacado\" en inglés"),
keywords: &[
"arriba",
"frecha",
"frecha coa mensaxe \"destacado\" en inglés",
"mellor",
"principal",
"top",
],
},
#[cfg(feature = "gu")]
crate::Annotation {
lang: "gu",
tts: Some("ટોચન\u{ac1}\u{a82} ચિહ\u{acd}ન"),
keywords: &[
"ઉપર",
"ઉપર તીર",
"ઉપરના\u{a82} તીર સાથ\u{ac7} ટોચ",
"ટોચ",
"ટોચન\u{ac1}\u{a82} ચિહ\u{acd}ન",
],
},
#[cfg(feature = "ha")]
crate::Annotation {
lang: "ha",
tts: Some("Kibiya KAI"),
keywords: &["Kibiya KAI", "kan", "kibiya", "sama"],
},
#[cfg(feature = "he")]
crate::Annotation {
lang: "he",
tts: Some("חץ מעלה"),
keywords: &["חץ", "חץ מעלה", "למעלה"],
},
#[cfg(feature = "hi")]
crate::Annotation {
lang: "hi",
tts: Some("TOP तीर"),
keywords: &[
"TOP तीर",
"ऊपर",
"ऊपर तीर",
"ऊपर तीर क\u{947} साथ शीर\u{94d}ष",
"शीर\u{94d}ष",
"शीर\u{94d}ष चिह\u{94d}न",
"शीर\u{94d}ष चिह\u{94d}न, ऊपर तीर क\u{947} साथ शीर\u{94d}ष",
],
},
#[cfg(feature = "hr")]
crate::Annotation {
lang: "hr",
tts: Some("strelica s natpisom \"top\""),
keywords: &["gore", "strelica", "strelica s natpisom \"top\"", "vrh"],
},
#[cfg(feature = "hu")]
crate::Annotation {
lang: "hu",
tts: Some("csúcsra nyíl"),
keywords: &["csúcsra nyíl", "felfelé", "nyíl", "top"],
},
#[cfg(feature = "hy")]
crate::Annotation {
lang: "hy",
tts: Some("վերև գրությամբ սլաք"),
keywords: &["սլաք", "վեր", "վերև", "վերև գրությամբ սլաք"],
},
#[cfg(feature = "id")]
crate::Annotation {
lang: "id",
tts: Some("tanda panah TOP"),
keywords: &["ke atas", "panah", "tanda panah TOP"],
},
#[cfg(feature = "ig")]
crate::Annotation {
lang: "ig",
tts: Some("UBE kachasị elu"),
keywords: &["UBE kachasị elu", "dị elu", "kachasị", "ube"],
},
#[cfg(feature = "is")]
crate::Annotation {
lang: "is",
tts: Some("efst-ör"),
keywords: &["efst", "efst-ör", "upp", "uppör", "ör"],
},
#[cfg(feature = "it")]
crate::Annotation {
lang: "it",
tts: Some("freccia TOP"),
keywords: &[
"Top con freccia verso l’alto",
"alto",
"freccia TOP",
"freccia verso l’alto",
"simbolo Top",
],
},
#[cfg(feature = "ja")]
crate::Annotation {
lang: "ja",
tts: Some("TOP矢印"),
keywords: &["TOP", "TOP矢印", "トップ", "上", "矢印"],
},
#[cfg(feature = "jv")]
crate::Annotation {
lang: "jv",
tts: Some("panah TOP"),
keywords: &["munggah", "panah", "panah TOP", "top"],
},
#[cfg(feature = "ka")]
crate::Annotation {
lang: "ka",
tts: Some("ისარი top"),
keywords: &["ზემოთ", "ისარი", "ისარი top"],
},
#[cfg(feature = "kab")]
crate::Annotation {
lang: "kab",
tts: Some("aneccab n ufalla"),
keywords: &["aneccab n ufalla"],
},
#[cfg(feature = "kk")]
crate::Annotation {
lang: "kk",
tts: Some("«негізгі» көрсеткісі"),
keywords: &["«негізгі» көрсеткісі", "жоғары", "көрсеткі", "үстіне"],
},
#[cfg(feature = "kl")]
crate::Annotation {
lang: "kl",
tts: Some("top"),
keywords: &["pil peger op", "top", "toppil"],
},
#[cfg(feature = "km")]
crate::Annotation {
lang: "km",
tts: Some("ព\u{17d2}រ\u{17bd}ញទៅលើមានអក\u{17d2}សរ Top នៅព\u{17b8}ក\u{17d2}រោម"),
keywords: &[
"TOP",
"ព\u{17d2}រ\u{17bd}ញ",
"ព\u{17d2}រ\u{17bd}ញទៅលើមានអក\u{17d2}សរ Top នៅព\u{17b8}ក\u{17d2}រោម",
],
},
#[cfg(feature = "kn")]
crate::Annotation {
lang: "kn",
tts: Some("ಮೇಲ\u{cbf}ನ ಬಾಣದ ಗುರುತು"),
keywords: &[
"ಮೇಲ\u{cbf}ನ ಬಾಣ",
"ಮೇಲ\u{cbf}ನ ಬಾಣದ ಗುರುತ\u{cbf}ನೊಂದ\u{cbf}ಗ\u{cc6} ಮೇಲ\u{cc6}",
"ಮೇಲ\u{cbf}ನ ಬಾಣದ ಗುರುತು",
"ಮೇಲ\u{cc6}",
"ಮೇಲ\u{cc6} ಚ\u{cbf}ಹ\u{ccd}ನ\u{cc6}",
],
},
#[cfg(feature = "ko")]
crate::Annotation {
lang: "ko",
tts: Some("위로 화살표"),
keywords: &["상단", "위로 화살표", "위쪽 화살표", "위쪽으로", "화살표"],
},
#[cfg(feature = "kok")]
crate::Annotation {
lang: "kok",
tts: Some("वयर ख\u{942}ण करपी बाण"),
keywords: &[
"बाण",
"वयर",
"वयर ख\u{941}णावपी बाण",
"वयर ख\u{942}ण करपी बाण",
],
},
#[cfg(feature = "ky")]
crate::Annotation {
lang: "ky",
tts: Some("башында жебеси"),
keywords: &["башы", "башында жебеси", "жебе", "өйдө"],
},
#[cfg(feature = "lb")]
crate::Annotation {
lang: "lb",
tts: Some("TOP-Feil"),
keywords: &["Feil", "TOP-Feil", "Top", "no uewen"],
},
#[cfg(feature = "lo")]
crate::Annotation {
lang: "lo",
tts: Some("ລ\u{eb9}ກສອນຊ\u{eb5}\u{ec9}ຂ\u{eb6}\u{ec9}ນ"),
keywords: &[
"ຂ\u{eb6}\u{ec9}ນ",
"ລ\u{eb9}ກສອນ",
"ລ\u{eb9}ກສອນຊ\u{eb5}\u{ec9}ຂ\u{eb6}\u{ec9}ນ",
"ເທ\u{eb4}ງສ\u{eb8}ດ",
],
},
#[cfg(feature = "lt")]
crate::Annotation {
lang: "lt",
tts: Some("rodyklė su užrašu „Top“"),
keywords: &["rodyklė", "rodyklė su užrašu „Top“", "viršus", "į viršų"],
},
#[cfg(feature = "lv")]
crate::Annotation {
lang: "lv",
tts: Some("bultiņa ar uzrakstu Top"),
keywords: &["augšup", "bultiņa", "bultiņa ar uzrakstu Top"],
},
#[cfg(feature = "mi")]
crate::Annotation {
lang: "mi",
tts: Some("pere RUNGA"),
keywords: &["ake", "pere", "pere RUNGA", "runga"],
},
#[cfg(feature = "mk")]
crate::Annotation {
lang: "mk",
tts: Some("стрелка за горе"),
keywords: &["горе", "стрелка", "стрелка за горе"],
},
#[cfg(feature = "ml")]
crate::Annotation {
lang: "ml",
tts: Some(
"മ\u{d41}കളിലേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളത\u{d4d}തിനൊപ\u{d4d}പം ടോപ\u{d4d}പ\u{d4d}",
),
keywords: &[
"അമ\u{d4d}പടയ\u{d3e}ളം",
"മ\u{d41}കളിലേക\u{d4d}ക\u{d41}ള\u{d4d}ള അമ\u{d4d}പടയ\u{d3e}ളത\u{d4d}തിനൊപ\u{d4d}പം ടോപ\u{d4d}പ\u{d4d}",
"മ\u{d41}കളിലേക\u{d4d}ക\u{d41}ള\u{d4d}ള ആരോയ\u{d4d}ക\u{d4d}കൊപ\u{d4d}പം ടോപ\u{d4d}പ\u{d4d}",
"മ\u{d41}കളിൽ",
],
},
#[cfg(feature = "mn")]
crate::Annotation {
lang: "mn",
tts: Some("дээшээ гэсэн сум"),
keywords: &["дээш", "дээшээ гэсэн сум", "сум"],
},
#[cfg(feature = "mr")]
crate::Annotation {
lang: "mr",
tts: Some("शीर\u{94d}ष बाण"),
keywords: &["बाण", "वर", "शीर\u{94d}ष बाण"],
},
#[cfg(feature = "ms")]
crate::Annotation {
lang: "ms",
tts: Some("anak panah ‘TOP’"),
keywords: &["anak panah", "anak panah ‘TOP’", "atas", "top"],
},
#[cfg(feature = "mt")]
crate::Annotation {
lang: "mt",
tts: Some("vleġġa TOP"),
keywords: &["fuq", "vleġġa", "vleġġa TOP", "’il fuq"],
},
#[cfg(feature = "my")]
crate::Annotation {
lang: "my",
tts: Some("ထ\u{102d}ပ\u{103a}သ\u{102d}\u{102f}\u{1037} မြား"),
keywords: &[
"ထ\u{102d}ပ\u{103a}သ\u{102d}\u{102f}\u{1037} မြား",
"မြား",
"အထက\u{103a}သ\u{102d}\u{102f}\u{1037}",
"အပေါ\u{103a}သ\u{102d}\u{102f}\u{1037}",
"အပေါ\u{103a}သ\u{102d}\u{102f}\u{1037}ပြ မြား သင\u{103a}\u{1039}ကေတ",
],
},
#[cfg(feature = "nb")]
crate::Annotation {
lang: "nb",
tts: Some("TOP-pil"),
keywords: &["TOP", "TOP-pil", "opp", "pil opp", "topp", "topp med pil"],
},
#[cfg(feature = "ne")]
crate::Annotation {
lang: "ne",
tts: Some("माथिको स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण"),
keywords: &[
"माथि",
"माथिको स\u{902}क\u{947}त गर\u{94d}न\u{947} वाण",
"वाण",
"शीर\u{94d}ष",
],
},
#[cfg(feature = "nl")]
crate::Annotation {
lang: "nl",
tts: Some("TOP-pijl"),
keywords: &["TOP-pijl", "bovenkant", "omhoog", "pijl"],
},
#[cfg(feature = "nn")]
crate::Annotation {
lang: "nn",
tts: Some("TOP-pil"),
keywords: &["TOP", "TOP-pil", "opp", "pil opp", "topp", "topp med pil"],
},
#[cfg(feature = "or")]
crate::Annotation {
lang: "or",
tts: Some("ଶୀର\u{b4d}ଷ ତୀର"),
keywords: &["ଉପର", "ତୀର", "ଶୀର\u{b4d}ଷ"],
},
#[cfg(feature = "pa")]
crate::Annotation {
lang: "pa",
tts: Some("ਉ\u{a71}ਪਰ ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ"),
keywords: &[
"ਉ\u{a71}ਤ\u{a47}",
"ਉ\u{a71}ਤ\u{a47} ਤੀਰ",
"ਉ\u{a71}ਤ\u{a47} ਤੀਰ ਨਾਲ ਉ\u{a71}ਪਰ",
"ਉ\u{a71}ਪਰ",
"ਉ\u{a71}ਪਰ ਦਾ ਨਿਸ\u{a3c}ਾਨ",
"ਉ\u{a71}ਪਰ ਵ\u{a71}ਲ ਇਸ\u{a3c}ਾਰਾ ਕਰਨ ਵਾਲਾ ਤੀਰ",
],
},
#[cfg(feature = "pa_Arab")]
crate::Annotation {
lang: "pa_Arab",
tts: Some("اوتلا تیر"),
keywords: &["اوتلا تیر", "ا\u{64f}وتے", "تیر", "چوٹی"],
},
#[cfg(feature = "pcm")]
crate::Annotation {
lang: "pcm",
tts: Some("TOP Áro"),
keywords: &["TOP Áro", "Tọp", "Áro", "Ọp"],
},
#[cfg(feature = "pl")]
crate::Annotation {
lang: "pl",
tts: Some("strzałka z napisem TOP"),
keywords: &["góra", "strzałka", "strzałka z napisem TOP"],
},
#[cfg(feature = "ps")]
crate::Annotation {
lang: "ps",
tts: Some("په سر غشی"),
keywords: &["سر", "غشی", "په سر غشی", "پورته"],
},
#[cfg(feature = "pt")]
crate::Annotation {
lang: "pt",
tts: Some("seta \"TOP\""),
keywords: &[
"para cima",
"seta",
"seta \"TOP",
"seta \"TOP\"",
"seta para cima",
"top",
],
},
#[cfg(feature = "pt_PT")]
crate::Annotation {
lang: "pt_PT",
tts: Some("seta TOP"),
keywords: &["parte superior", "seta", "seta TOP"],
},
#[cfg(feature = "qu")]
crate::Annotation {
lang: "qu",
tts: Some("HANAQ wachʼi"),
keywords: &["HANAQ wachʼi"],
},
#[cfg(feature = "ro")]
crate::Annotation {
lang: "ro",
tts: Some("săgeată cu textul TOP"),
keywords: &["sus", "săgeată", "săgeată cu textul TOP"],
},
#[cfg(feature = "root")]
crate::Annotation {
lang: "root",
tts: Some("E10-729"),
keywords: &["E10-729"],
},
#[cfg(feature = "ru")]
crate::Annotation {
lang: "ru",
tts: Some("стрелка «верх»"),
keywords: &[
"вверх",
"верх",
"наверх",
"стрелка «верх»",
"стрелка вверх",
"топ",
],
},
#[cfg(feature = "rw")]
crate::Annotation {
lang: "rw",
tts: Some("akambi ka TOP"),
keywords: &["akambi ka TOP", "hejuru", "umwambi", "zamuka"],
},
#[cfg(feature = "sd")]
crate::Annotation {
lang: "sd",
tts: Some("مٿيون تير"),
keywords: &["تير", "مٿي", "مٿيون تير"],
},
#[cfg(feature = "si")]
crate::Annotation {
lang: "si",
tts: Some("ඉහළ ඊතලය"),
keywords: &["ඉහළ", "ඊතලය", "ම\u{dd4}ද\u{dd4}න"],
},
#[cfg(feature = "sk")]
crate::Annotation {
lang: "sk",
tts: Some("šípka TOP"),
keywords: &["nahor", "navrch", "šípka", "šípka TOP"],
},
#[cfg(feature = "sl")]
crate::Annotation {
lang: "sl",
tts: Some("puščica na vrh"),
keywords: &["navzgor", "puščica", "puščica na vrh", "zgoraj"],
},
#[cfg(feature = "so")]
crate::Annotation {
lang: "so",
tts: Some("falaarta SARE"),
keywords: &["falaarta SARE", "fallaar", "kor", "sare"],
},
#[cfg(feature = "sq")]
crate::Annotation {
lang: "sq",
tts: Some("shigjetë lart"),
keywords: &["kreu", "lart", "shigjetë"],
},
#[cfg(feature = "sr")]
crate::Annotation {
lang: "sr",
tts: Some("стрелица за врх"),
keywords: &["врх", "гoрe", "стрeлицa", "стрелица за врх"],
},
#[cfg(feature = "sr_Cyrl_BA")]
crate::Annotation {
lang: "sr_Cyrl_BA",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "sr_Latn")]
crate::Annotation {
lang: "sr_Latn",
tts: Some("strelica za vrh"),
keywords: &["gore", "strelica", "strelica za vrh", "vrh"],
},
#[cfg(feature = "sv")]
crate::Annotation {
lang: "sv",
tts: Some("Top med uppåtpil"),
keywords: &["Top med uppåtpil", "pil", "uppåtpil"],
},
#[cfg(feature = "sw")]
crate::Annotation {
lang: "sw",
tts: Some("mshale unaoangalia juu"),
keywords: &[
"juu",
"mshale",
"mshale unaoangalia juu",
"mshale unaoelekea juu",
],
},
#[cfg(feature = "sw_KE")]
crate::Annotation {
lang: "sw_KE",
tts: Some("↑↑↑"),
keywords: &["↑↑↑"],
},
#[cfg(feature = "ta")]
crate::Annotation {
lang: "ta",
tts: Some("மேல\u{bcd}"),
keywords: &["மேலே", "மேல\u{bcd}", "மேல\u{bcd} அம\u{bcd}புக\u{bcd}குறி"],
},
#[cfg(feature = "te")]
crate::Annotation {
lang: "te",
tts: Some("ప\u{c48}క\u{c3f} బ\u{c3e}ణం"),
keywords: &["ప\u{c48}క\u{c3f}", "బ\u{c3e}ణం"],
},
#[cfg(feature = "tg")]
crate::Annotation {
lang: "tg",
tts: Some("тири \"БОЛОЙ\""),
keywords: &["боло", "тир", "тири \"БОЛОЙ\""],
},
#[cfg(feature = "th")]
crate::Annotation {
lang: "th",
tts: Some("ส\u{e31}ญล\u{e31}กษณ\u{e4c}บน"),
keywords: &["ล\u{e39}กศร", "ส\u{e31}ญล\u{e31}กษณ\u{e4c}บน"],
},
#[cfg(feature = "ti")]
crate::Annotation {
lang: "ti",
tts: Some("ላዕለዋይ ምልክት"),
keywords: &["ላዕለዋይ", "ምልክት", "ንላዕሊ"],
},
#[cfg(feature = "tk")]
crate::Annotation {
lang: "tk",
tts: Some("TOP oky"),
keywords: &["TOP oky", "ok", "üst", "ýokary"],
},
#[cfg(feature = "to")]
crate::Annotation {
lang: "to",
tts: Some("ngahau TUMUTUMU"),
keywords: &["TUMUTUMU", "hake", "ngahau"],
},
#[cfg(feature = "tr")]
crate::Annotation {
lang: "tr",
tts: Some("yukarı oku"),
keywords: &[
"Yukarı",
"yukarı",
"yukarı ok",
"yukarı okla Yukarı",
"yukarı oku",
],
},
#[cfg(feature = "ug")]
crate::Annotation {
lang: "ug",
tts: Some("چوققا كۆرسەتكۈچ"),
keywords: &["ئۈستى", "كۆرسەتكۈچ", "يۇقىرى", "چوققا كۆرسەتكۈچ"],
},
#[cfg(feature = "uk")]
crate::Annotation {
lang: "uk",
tts: Some("стрілка [TOP]"),
keywords: &["вверх", "вгору", "стрілка", "стрілка [TOP]"],
},
#[cfg(feature = "ur")]
crate::Annotation {
lang: "ur",
tts: Some("بالا تیر"),
keywords: &["اوپر", "بالا تیر", "تیر"],
},
#[cfg(feature = "uz")]
crate::Annotation {
lang: "uz",
tts: Some("tepaga chizig‘i"),
keywords: &["chiziq", "tepaga", "tepaga chizig‘i", "yuqoriga"],
},
#[cfg(feature = "vi")]
crate::Annotation {
lang: "vi",
tts: Some("mũi tên TOP"),
keywords: &["lên", "mũi tên", "mũi tên TOP", "top"],
},
#[cfg(feature = "wo")]
crate::Annotation {
lang: "wo",
tts: Some("fettu COLLU"),
keywords: &["cat", "fett", "fettu COLLU", "kaw"],
},
#[cfg(feature = "xh")]
crate::Annotation {
lang: "xh",
tts: Some("utolo oluthi PHEZULU"),
keywords: &["phezulu", "utolo", "utolo oluthi PHEZULU"],
},
#[cfg(feature = "yo")]
crate::Annotation {
lang: "yo",
tts: Some("Ọfa LOKE"),
keywords: &["orí òkè", "o\u{329}fà", "òkè", "Ọfa LOKE"],
},
#[cfg(feature = "yue")]
crate::Annotation {
lang: "yue",
tts: Some("「頂部」箭咀"),
keywords: &["「頂部」箭咀", "向上", "箭咀", "頂部"],
},
#[cfg(feature = "yue_Hans")]
crate::Annotation {
lang: "yue_Hans",
tts: Some("“顶部”箭咀"),
keywords: &["“顶部”箭咀", "向上", "箭咀", "顶部"],
},
#[cfg(feature = "zh")]
crate::Annotation {
lang: "zh",
tts: Some("置顶"),
keywords: &["向上", "标识", "置顶"],
},
#[cfg(feature = "zh_Hant")]
crate::Annotation {
lang: "zh_Hant",
tts: Some("置頂"),
keywords: &["置頂"],
},
#[cfg(feature = "zh_Hant_HK")]
crate::Annotation {
lang: "zh_Hant_HK",
tts: Some("top 標誌的箭嘴"),
keywords: &["top 標誌的箭嘴", "向上"],
},
#[cfg(feature = "zu")]
crate::Annotation {
lang: "zu",
tts: Some("umcibisholo waphezulu"),
keywords: &["phezulu", "umcibisholo", "umcibisholo waphezulu"],
},
],
};
| 32.230277 | 166 | 0.399813 |
5d1cd1e3373fc4620fc62aad63e9474358249757 | 3,524 | use std::collections::HashMap;
use std::sync::Arc;
use request::SapperRequest;
use response::SapperResponse;
use handler::SapperHandler;
use app::Result;
use app::Error;
use app::PathParams;
use app::Key;
use hyper::method::Method;
use recognizer::Router as Recognizer;
use recognizer::{Match, Params};
impl Key for PathParams { type Value = Params; }
/// `Router` provides an interface for creating complex routes as middleware
/// for the Iron framework.
pub struct Router {
// The routers, specialized by method.
routers: HashMap<Method, Recognizer<Arc<Box<SapperHandler>>>>,
// Routes that accept any method.
wildcard: Recognizer<Arc<Box<SapperHandler>>>
}
impl Router {
pub fn new() -> Router {
Router {
routers: HashMap::new(),
wildcard: Recognizer::new()
}
}
pub fn route<S>(&mut self, method: Method,
glob: S, handler: Arc<Box<SapperHandler>>) -> &mut Router
where S: AsRef<str> {
self.routers.entry(method).or_insert(Recognizer::new())
.add(glob.as_ref(), handler);
self
}
fn recognize(&self, method: &Method, path: &str)
-> Option<Match<&Arc<Box<SapperHandler>>>> {
self.routers.get(method).and_then(|router| router.recognize(path).ok())
.or(self.wildcard.recognize(path).ok())
}
// fn handle_options(&self, path: &str) -> Response {
// static METHODS: &'static [method::Method] =
// &[method::Get, method::Post, method::Post, method::Put,
// method::Delete, method::Head, method::Patch];
// // Get all the available methods and return them.
// let mut options = vec![];
// for method in METHODS.iter() {
// self.routers.get(method).map(|router| {
// if let Some(_) = router.recognize(path).ok() {
// options.push(method.clone());
// }
// });
// }
// // If GET is there, HEAD is also there.
// if options.contains(&method::Get) && !options.contains(&method::Head) {
// options.push(method::Head);
// }
// let mut res = Response::with(status::StatusCode::Ok);
// res.headers.set(headers::Allow(options));
// res
// }
// Tests for a match by adding or removing a trailing slash.
// fn redirect_slash(&self, req : &Request) -> Option<Error> {
// let mut url = req.url.clone();
// let mut path = url.path.join("/");
// if let Some(last_char) = path.chars().last() {
// if last_char == '/' {
// path.pop();
// url.path.pop();
// } else {
// path.push('/');
// url.path.push(String::new());
// }
// }
// self.recognize(&req.method(), &path).and(
// Some(Error::new(TrailingSlash,
// (status::MovedPermanently, Redirect(url))))
// )
// }
pub fn handle_method(&self, req: &mut SapperRequest, path: &str) -> Result<SapperResponse> {
if let Some(matched) = self.recognize(req.method(), path) {
req.ext_mut().insert::<PathParams>(matched.params);
matched.handler.handle(req)
} else {
// panic!("router not matched!");
// self.redirect_slash(req).and_then(|redirect| Some(Err(redirect)))
Err(Error::NotFound)
}
}
}
| 32.036364 | 96 | 0.54143 |
8f6adebb655e9a4c9aa65eb39f8d5adfa5fa897b | 2,204 | #[test]
fn align_flex_start_with_shrinking_children_with_stretch() {
let mut stretch = stretch::Stretch::new();
let node000 = stretch
.new_node(stretch::style::Style { flex_grow: 1f32, flex_shrink: 1f32, ..Default::default() }, &[])
.unwrap();
let node00 = stretch
.new_node(stretch::style::Style { flex_grow: 1f32, flex_shrink: 1f32, ..Default::default() }, &[node000])
.unwrap();
let node0 = stretch
.new_node(
stretch::style::Style { align_items: stretch::style::AlignItems::FlexStart, ..Default::default() },
&[node00],
)
.unwrap();
let node = stretch
.new_node(
stretch::style::Style {
size: stretch::geometry::Size {
width: stretch::style::Dimension::Points(500f32),
height: stretch::style::Dimension::Points(500f32),
..Default::default()
},
..Default::default()
},
&[node0],
)
.unwrap();
stretch.compute_layout(node, stretch::geometry::Size::undefined()).unwrap();
assert_eq!(stretch.layout(node).unwrap().size.width, 500f32);
assert_eq!(stretch.layout(node).unwrap().size.height, 500f32);
assert_eq!(stretch.layout(node).unwrap().location.x, 0f32);
assert_eq!(stretch.layout(node).unwrap().location.y, 0f32);
assert_eq!(stretch.layout(node0).unwrap().size.width, 0f32);
assert_eq!(stretch.layout(node0).unwrap().size.height, 500f32);
assert_eq!(stretch.layout(node0).unwrap().location.x, 0f32);
assert_eq!(stretch.layout(node0).unwrap().location.y, 0f32);
assert_eq!(stretch.layout(node00).unwrap().size.width, 0f32);
assert_eq!(stretch.layout(node00).unwrap().size.height, 0f32);
assert_eq!(stretch.layout(node00).unwrap().location.x, 0f32);
assert_eq!(stretch.layout(node00).unwrap().location.y, 0f32);
assert_eq!(stretch.layout(node000).unwrap().size.width, 0f32);
assert_eq!(stretch.layout(node000).unwrap().size.height, 0f32);
assert_eq!(stretch.layout(node000).unwrap().location.x, 0f32);
assert_eq!(stretch.layout(node000).unwrap().location.y, 0f32);
}
| 46.893617 | 113 | 0.623412 |
21713154a1edc6b0c3b1a1ac62f37c783b55f8f9 | 3,183 | use anyhow::{Context, Result};
use std::str::FromStr;
pub fn part1(input: &str) -> Result<String> {
let commands = parse(input)?;
let mut loc = Location::default();
for c in commands {
loc.update(&c)
}
log::trace!("final location: {:?}", loc);
Ok(format!("{}", loc.x * loc.depth))
}
pub fn part2(input: &str) -> Result<String> {
let commands = parse(input)?;
let mut loc = Location::default();
for c in commands {
loc.update_with_aim(&c)
}
log::trace!("final location: {:?}", loc);
Ok(format!("{}", loc.x * loc.depth))
}
fn parse(input: &str) -> Result<Vec<Command>> {
input.lines().map(|l| l.parse::<Command>()).collect()
}
#[derive(Debug, Clone, Copy, PartialEq)]
enum Direction {
Up,
Down,
Forward,
}
impl FromStr for Direction {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(match s {
"up" => Direction::Up,
"down" => Direction::Down,
"forward" => Direction::Forward,
_ => anyhow::bail!("unable to parse direction: {:?}", s),
})
}
}
#[derive(Debug, Clone, Copy, PartialEq)]
struct Command {
direction: Direction,
units: u32,
}
impl FromStr for Command {
    type Err = anyhow::Error;

    /// Parses a line of the form `<direction> <units>`, e.g. "forward 5".
    /// Extra trailing tokens are ignored, matching the original behavior.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split_whitespace();
        // Take the first two whitespace-separated tokens, or fail.
        let (dir, mag) = match (parts.next(), parts.next()) {
            (Some(d), Some(m)) => (d, m),
            _ => anyhow::bail!("unable to parse command from {:?}", s),
        };
        let units: u32 = mag
            .parse()
            .with_context(|| format!("unable to parse number: {:?}", mag))?;
        let direction = dir.parse::<Direction>()?;
        Ok(Command { direction, units })
    }
}
/// Submarine position state shared by both puzzle parts.
#[derive(Debug, Clone, Copy, PartialEq, Default)]
struct Location {
    x: i32,     // horizontal position
    aim: i32,   // only used by part 2 (`update_with_aim`)
    depth: i32, // current depth; `Down` increases it in part 1
}
impl Location {
    /// Part-1 semantics: up/down move directly in depth, forward in x.
    fn update(&mut self, cmd: &Command) {
        let d = cmd.units as i32;
        match cmd.direction {
            Direction::Forward => self.x += d,
            Direction::Down => self.depth += d,
            Direction::Up => self.depth -= d,
        }
    }

    /// Part-2 semantics: up/down adjust aim only; forward moves x and
    /// changes depth by `aim * units`.
    fn update_with_aim(&mut self, cmd: &Command) {
        let d = cmd.units as i32;
        match cmd.direction {
            Direction::Forward => {
                self.x += d;
                self.depth += self.aim * d;
            }
            Direction::Down => self.aim += d,
            Direction::Up => self.aim -= d,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Puzzle input and the worked example from the problem statement;
    // these files must exist relative to the crate for tests to build.
    const INPUT: &str = include_str!("../../../input/day2");
    const EX: &str = include_str!("../../../input/day2_ex");
    // Regression tests pinning the accepted answers for the real input.
    #[test]
    fn verify_p1() {
        assert_eq!(part1(INPUT).unwrap().as_str(), "1648020")
    }
    #[test]
    fn verify_p2() {
        assert_eq!(part2(INPUT).unwrap().as_str(), "1759818555")
    }
    // Expected values published in the puzzle description.
    #[test]
    fn check_example() {
        assert_eq!(part1(EX).unwrap().as_str(), "150")
    }
    #[test]
    fn check_example_pt2() {
        assert_eq!(part2(EX).unwrap().as_str(), "900")
    }
}
| 25.878049 | 85 | 0.532202 |
d74ce91bf95a5eeeed8e50877e24808d45f4f365 | 11,902 | //! Provides userspace applications with the ability to communicate over the SPI
//! bus as a peripheral. Only supports chip select 0.
use core::cell::Cell;
use core::cmp;
use kernel::grant::{AllowRoCount, AllowRwCount, Grant, GrantKernelData, UpcallCount};
use kernel::hil::spi::ClockPhase;
use kernel::hil::spi::ClockPolarity;
use kernel::hil::spi::{SpiSlaveClient, SpiSlaveDevice};
use kernel::processbuffer::{ReadableProcessBuffer, WriteableProcessBuffer};
use kernel::syscall::{CommandReturn, SyscallDriver};
use kernel::utilities::cells::{OptionalCell, TakeCell};
use kernel::{ErrorCode, ProcessId};
/// Syscall driver number.
use crate::driver;
pub const DRIVER_NUM: usize = driver::NUM::SpiPeripheral as usize;
/// Ids for read-only allow buffers
mod ro_allow {
    /// Buffer holding the bytes the app wants to transmit over SPI.
    pub const WRITE: usize = 0;
    /// The number of allow buffers the kernel stores for this grant
    pub const COUNT: usize = 1;
}
/// Ids for read-write allow buffers
mod rw_allow {
    /// Buffer the kernel fills with bytes received over SPI.
    pub const READ: usize = 0;
    /// The number of allow buffers the kernel stores for this grant
    pub const COUNT: usize = 1;
}
/// Suggested length for the SPI read and write buffer
pub const DEFAULT_READ_BUF_LENGTH: usize = 1024;
pub const DEFAULT_WRITE_BUF_LENGTH: usize = 1024;
// Since we provide an additional callback in slave mode for
// when the chip is selected, we have added a "PeripheralApp" struct
// that includes this new callback field.
#[derive(Default)]
pub struct PeripheralApp {
    /// Total number of bytes the app asked to transfer (set by command 1).
    len: usize,
    /// Offset into the app's write buffer reached so far; advanced by
    /// `do_next_read_write` after each chunk.
    index: usize,
}
/// Userspace driver exposing a SPI slave (peripheral-mode) device.
pub struct SpiPeripheral<'a, S: SpiSlaveDevice> {
    spi_slave: &'a S,
    /// True while an app-initiated transfer is in flight.
    busy: Cell<bool>,
    /// Kernel-side bounce buffers handed to the SPI hardware.
    kernel_read: TakeCell<'static, [u8]>,
    kernel_write: TakeCell<'static, [u8]>,
    /// Usable chunk size: min of the two bounce-buffer lengths.
    kernel_len: Cell<usize>,
    grants: Grant<
        PeripheralApp,
        UpcallCount<2>,
        AllowRoCount<{ ro_allow::COUNT }>,
        AllowRwCount<{ rw_allow::COUNT }>,
    >,
    /// The process currently granted exclusive use of this driver.
    current_process: OptionalCell<ProcessId>,
}
impl<'a, S: SpiSlaveDevice> SpiPeripheral<'a, S> {
    /// Creates the driver in an idle state with no bounce buffers;
    /// `config_buffers` must be called before transfers can move data.
    pub fn new(
        spi_slave: &'a S,
        grants: Grant<
            PeripheralApp,
            UpcallCount<2>,
            AllowRoCount<{ ro_allow::COUNT }>,
            AllowRwCount<{ rw_allow::COUNT }>,
        >,
    ) -> SpiPeripheral<'a, S> {
        SpiPeripheral {
            spi_slave: spi_slave,
            busy: Cell::new(false),
            kernel_len: Cell::new(0),
            kernel_read: TakeCell::empty(),
            kernel_write: TakeCell::empty(),
            grants,
            current_process: OptionalCell::empty(),
        }
    }
    /// Installs the static bounce buffers; the usable chunk size is the
    /// smaller of the two lengths.
    pub fn config_buffers(&mut self, read: &'static mut [u8], write: &'static mut [u8]) {
        let len = cmp::min(read.len(), write.len());
        self.kernel_len.set(len);
        self.kernel_read.replace(read);
        self.kernel_write.replace(write);
    }
    // Copies the next chunk of the app's write buffer into the kernel
    // bounce buffer and starts a hardware transfer of that chunk.
    // Assumes checks for busy/etc. already done
    // Updates app.index to be index + length of op
    fn do_next_read_write(&self, app: &mut PeripheralApp, kernel_data: &GrantKernelData) {
        let write_len = self.kernel_write.map_or(0, |kwbuf| {
            let mut start = app.index;
            let tmp_len = kernel_data
                .get_readonly_processbuffer(ro_allow::WRITE)
                .and_then(|write| {
                    write.enter(|src| {
                        // Clamp the chunk to the remaining request, the
                        // bounce-buffer size, and the app buffer's length.
                        let len = cmp::min(app.len - start, self.kernel_len.get());
                        let end = cmp::min(start + len, src.len());
                        start = cmp::min(start, end);
                        for (i, c) in src[start..end].iter().enumerate() {
                            kwbuf[i] = c.get();
                        }
                        end - start
                    })
                })
                .unwrap_or(0);
            app.index = start + tmp_len;
            tmp_len
        });
        // TODO verify SPI return value
        let _ = self.spi_slave.read_write_bytes(
            self.kernel_write.take(),
            self.kernel_read.take(),
            write_len,
        );
    }
}
impl<S: SpiSlaveDevice> SyscallDriver for SpiPeripheral<'_, S> {
    /// Handle a command from a userspace process.
    ///
    /// - 0: check if present
    /// - 1: read/write buffers
    ///   - read and write buffers optional
    ///   - fails if arg1 (bytes to write) > write_buffer.len()
    /// - 2: get chip select
    ///   - returns current selected peripheral
    ///   - in slave mode, always returns 0
    /// - 3: set clock phase on current peripheral
    ///   - 0 is sample leading
    ///   - non-zero is sample trailing
    /// - 4: get clock phase on current peripheral
    ///   - 0 is sample leading
    ///   - non-zero is sample trailing
    /// - 5: set clock polarity on current peripheral
    ///   - 0 is idle low
    ///   - non-zero is idle high
    /// - 6: get clock polarity on current peripheral
    ///   - 0 is idle low
    ///   - non-zero is idle high
    ///
    /// Any command other than 0 reserves the driver for the calling
    /// process (another process gets NOMEM until the owner goes away).
    fn command(
        &self,
        command_num: usize,
        arg1: usize,
        _: usize,
        process_id: ProcessId,
    ) -> CommandReturn {
        if command_num == 0 {
            // Handle this first as it should be returned unconditionally.
            return CommandReturn::success();
        }
        // Check if this driver is free, or already dedicated to this process.
        let match_or_empty_or_nonexistant = self.current_process.map_or(true, |current_process| {
            self.grants
                .enter(*current_process, |_, _| current_process == &process_id)
                .unwrap_or(true)
        });
        if match_or_empty_or_nonexistant {
            self.current_process.set(process_id);
        } else {
            return CommandReturn::failure(ErrorCode::NOMEM);
        }
        match command_num {
            1 /* read_write_bytes */ => {
                if self.busy.get() {
                    return CommandReturn::failure(ErrorCode::BUSY);
                }
                self.grants.enter(process_id, |app, kernel_data| {
                    // The request must fit in both allowed buffers.
                    let mut mlen = kernel_data
                        .get_readonly_processbuffer(ro_allow::WRITE)
                        .map_or(0, |write| write.len());
                    let rlen = kernel_data
                        .get_readwrite_processbuffer(rw_allow::READ)
                        .map_or(mlen, |read| read.len());
                    mlen = cmp::min(mlen, rlen);
                    if mlen >= arg1 && arg1 > 0 {
                        app.len = arg1;
                        app.index = 0;
                        self.busy.set(true);
                        self.do_next_read_write(app, kernel_data);
                        CommandReturn::success()
                    } else {
                        CommandReturn::failure(ErrorCode::INVAL)
                    }
                }).unwrap_or(CommandReturn::failure(ErrorCode::NOMEM))
            }
            2 /* get chip select */ => {
                // Only 0 is supported
                CommandReturn::success_u32(0)
            }
            3 /* set phase */ => {
                match match arg1 {
                    0 => self.spi_slave.set_phase(ClockPhase::SampleLeading),
                    _ => self.spi_slave.set_phase(ClockPhase::SampleTrailing),
                } {
                    Ok(()) => CommandReturn::success(),
                    Err(error) => CommandReturn::failure(error.into())
                }
            }
            4 /* get phase */ => {
                CommandReturn::success_u32(self.spi_slave.get_phase() as u32)
            }
            5 /* set polarity */ => {
                match match arg1 {
                    0 => self.spi_slave.set_polarity(ClockPolarity::IdleLow),
                    _ => self.spi_slave.set_polarity(ClockPolarity::IdleHigh),
                } {
                    Ok(()) => CommandReturn::success(),
                    Err(error) => CommandReturn::failure(error.into())
                }
            }
            6 /* get polarity */ => {
                CommandReturn::success_u32(self.spi_slave.get_polarity() as u32)
            }
            _ => CommandReturn::failure(ErrorCode::NOSUPPORT)
        }
    }
    /// Allocates this driver's grant region for `processid`.
    fn allocate_grant(&self, processid: ProcessId) -> Result<(), kernel::process::Error> {
        self.grants.enter(processid, |_, _| {})
    }
}
impl<S: SpiSlaveDevice> SpiSlaveClient for SpiPeripheral<'_, S> {
    /// Hardware callback: one bounce-buffer chunk finished. Copies the
    /// received bytes into the app's read buffer, then either schedules
    /// the next chunk or signals completion via upcall 0.
    fn read_write_done(
        &self,
        writebuf: Option<&'static mut [u8]>,
        readbuf: Option<&'static mut [u8]>,
        length: usize,
        _status: Result<(), ErrorCode>,
    ) {
        self.current_process.map(|process_id| {
            let _ = self.grants.enter(*process_id, move |app, kernel_data| {
                let rbuf = readbuf.map(|src| {
                    // NOTE(review): assumes app.index >= length, i.e. this
                    // callback reports the same chunk size that
                    // do_next_read_write advanced index by — confirm.
                    let index = app.index;
                    let _ = kernel_data
                        .get_readwrite_processbuffer(rw_allow::READ)
                        .and_then(|read| {
                            read.mut_enter(|dest| {
                                // Need to be careful that app_read hasn't changed
                                // under us, so check all values against actual
                                // slice lengths.
                                //
                                // If app_read is shorter than before, and shorter
                                // than what we have read would require, then truncate.
                                // -pal 12/9/20
                                let end = index;
                                let start = index - length;
                                let end = cmp::min(end, cmp::min(src.len(), dest.len()));
                                // If the new endpoint is earlier than our expected
                                // startpoint, we set the startpoint to be the same;
                                // This results in a zero-length operation. -pal 12/9/20
                                let start = cmp::min(start, end);
                                let dest_area = &dest[start..end];
                                let real_len = end - start;
                                for (i, c) in src[0..real_len].iter().enumerate() {
                                    dest_area[i].set(*c);
                                }
                            })
                        });
                    src
                });
                // Return the bounce buffers for the next chunk.
                self.kernel_read.put(rbuf);
                self.kernel_write.put(writebuf);
                if app.index == app.len {
                    // Whole request done: reset state and notify the app.
                    self.busy.set(false);
                    let len = app.len;
                    app.len = 0;
                    app.index = 0;
                    kernel_data.schedule_upcall(0, (len, 0, 0)).ok();
                } else {
                    self.do_next_read_write(app, kernel_data);
                }
            });
        });
    }
    // Simple callback for when chip has been selected
    fn chip_selected(&self) {
        self.current_process.map(|process_id| {
            let _ = self.grants.enter(*process_id, move |app, kernel_data| {
                let len = app.len;
                kernel_data.schedule_upcall(1, (len, 0, 0)).ok();
            });
        });
    }
}
| 37.664557 | 97 | 0.515376 |
2268700766121e90ac8d713ffd1f4cc198abf9c3 | 2,275 | use crate::attribute::{Attribute, AttributeCommon};
use crate::character::character_common_data::CharacterCommonData;
use crate::common::item_config_type::{ItemConfig, ItemConfigType};
use crate::common::WeaponType;
use crate::weapon::weapon_base_atk::WeaponBaseATKFamily;
use crate::weapon::weapon_common_data::WeaponCommonData;
use crate::weapon::weapon_effect::WeaponEffect;
use crate::weapon::weapon_static_data::WeaponStaticData;
use crate::weapon::weapon_sub_stat::WeaponSubStatFamily;
use crate::weapon::{WeaponConfig, WeaponName};
use crate::weapon::weapon_trait::WeaponTrait;
/// Passive effect state for the Blackcliff Pole weapon.
pub struct BlackcliffPoleEffect {
    /// Number of active passive stacks (0.0–3.0 per the item config).
    stack: f64
}
impl BlackcliffPoleEffect {
    /// Builds the effect from the weapon config; any config variant other
    /// than `BlackcliffPole` yields zero stacks.
    pub fn new(config: &WeaponConfig) -> BlackcliffPoleEffect {
        let stack = if let WeaponConfig::BlackcliffPole { stack } = *config {
            stack
        } else {
            0.0
        };
        BlackcliffPoleEffect { stack }
    }
}
impl<T: Attribute> WeaponEffect<T> for BlackcliffPoleEffect {
    /// Applies the stacking ATK% bonus: (0.09 + 0.03 * refine) per stack.
    fn apply(&self, data: &WeaponCommonData, attribute: &mut T) {
        let per_stack = 0.09 + data.refine as f64 * 0.03;
        attribute.add_atk_percentage("黑岩刺枪被动等效", per_stack * self.stack);
    }
}
/// Marker type carrying the Blackcliff Pole weapon's static data.
pub struct BlackcliffPole;
impl WeaponTrait for BlackcliffPole {
    const META_DATA: WeaponStaticData = WeaponStaticData {
        name: WeaponName::BlackcliffPole,
        weapon_type: WeaponType::Polearm,
        weapon_sub_stat: Some(WeaponSubStatFamily::CriticalDamage120),
        weapon_base: WeaponBaseATKFamily::ATK510,
        star: 4,
        #[cfg(not(target_family = "wasm"))]
        effect: Some("乘胜追击:击败敌人后,攻击力提升12%/15%/18%/21%/24%,持续30秒。该效果至多叠加3层,每层持续时间独立。"),
        #[cfg(not(target_family = "wasm"))]
        chs: "黑岩刺枪"
    };
    // UI config: a single float slider selecting the stack count (0–3).
    #[cfg(not(target_family = "wasm"))]
    const CONFIG_DATA: Option<&'static [ItemConfig]> = Some(&[
        ItemConfig {
            name: "stack",
            title: ItemConfig::DEFAULT_STACK_TITLE,
            config: ItemConfigType::Float { min: 0.0, max: 3.0, default: 0.0 }
        }
    ]);
    /// Builds the runtime effect object from the user-supplied config.
    fn get_effect<A: Attribute>(_character: &CharacterCommonData, config: &WeaponConfig) -> Option<Box<dyn WeaponEffect<A>>> {
        Some(Box::new(BlackcliffPoleEffect::new(config)))
    }
}
| 35 | 126 | 0.664176 |
eb27a66c4ed30b8274aef137a8f2a7414a201f1f | 3,367 | use log::{debug, error, info, warn};
use std::net::{SocketAddr, UdpSocket};
use std::time::Duration;
use crate::{
crypto::derive_symmetric_key,
error::{Error, Result},
message::{recv_from, send_to, ClientMessage, ServerMessage},
};
/// Sends `msg` to `server_addr` and waits for a reply on `sock`.
///
/// Retries forever: a read timeout (WouldBlock, per the socket's read
/// timeout configured by the caller) triggers a resend; datagrams from
/// any address other than the server are logged and ignored. Returns
/// the first message received from the server, or a non-timeout error.
fn send_and_receive(
    msg: &ClientMessage,
    sock: &UdpSocket,
    server_addr: SocketAddr,
) -> Result<ServerMessage> {
    'resend: loop {
        info!("sending a message ...");
        send_to(msg, &sock, server_addr)?;
        info!("waiting for server response ...");
        'wait: loop {
            match recv_from::<ServerMessage>(&sock) {
                // Ignore strangers; keep waiting within the same send.
                Ok((_, src)) if src != server_addr => {
                    warn!("message from other than the server: {}", src);
                    continue 'wait;
                }
                // Read timeout: go back and resend the request.
                Err(Error::Io(err)) if err.kind() == std::io::ErrorKind::WouldBlock => {
                    debug!("timeout");
                    continue 'resend;
                }
                Err(err) => return Err(err),
                Ok((msg, _)) => return Ok(msg),
            }
        }
    }
}
/// Performs the rendezvous handshake with the matching server.
///
/// Flow per attempt: generate an ephemeral X25519 keypair, fetch the
/// server's public key, derive a symmetric key, send the SHA-256 of
/// `psk` (the matching key) encrypted, and decrypt the two addresses
/// the server returns. Any failure logs and restarts the whole loop.
/// Returns `(our_external_addr, peer_addr)` as seen by the server.
pub fn get_peer_addr(
    sock: &UdpSocket,
    server_addr: SocketAddr,
    psk: &[u8],
) -> Result<(SocketAddr, SocketAddr)> {
    info!("psk = {:?}", psk);
    // 10s read timeout drives the resend loop in send_and_receive.
    sock.set_read_timeout(Duration::from_secs(10).into())?;
    // Peers pair up by presenting the same SHA-256(psk) to the server.
    let matching_key = {
        use ring::digest::{digest, SHA256};
        digest(&SHA256, psk).as_ref().to_vec()
    };
    'main: loop {
        // Fresh ephemeral keypair each attempt (EphemeralPrivateKey is
        // consumed by the key agreement below).
        let (client_sk, client_pk) = {
            use ring::agreement::{EphemeralPrivateKey, X25519};
            let rng = ring::rand::SystemRandom::new();
            let client_sk = EphemeralPrivateKey::generate(&X25519, &rng)?;
            let client_pk = client_sk.compute_public_key()?;
            (client_sk, client_pk)
        };
        let server_pubkey = {
            match send_and_receive(&ClientMessage::Hello, &sock, server_addr) {
                Ok(ServerMessage::ServerPubKey { pubkey }) => pubkey,
                Ok(_) => {
                    error!("{}", Error::UnexpectedMessage);
                    continue 'main;
                }
                Err(err) => {
                    error!("{}", err);
                    // Back off briefly before retrying the handshake.
                    std::thread::sleep(Duration::from_secs(3));
                    continue 'main;
                }
            }
        };
        let mut symmkey = derive_symmetric_key(client_sk, &server_pubkey, 0)?;
        let request = ClientMessage::MatchRequest {
            pubkey: client_pk.as_ref().to_vec(),
            enc_matching_key: symmkey.encrypt(matching_key.clone())?,
        };
        let (my_addr, peer_addr) = {
            match send_and_receive(&request, &sock, server_addr) {
                Ok(ServerMessage::Matched {
                    enc_your_addr,
                    enc_peer_addr,
                }) => (
                    symmkey.decrypt(enc_your_addr)?,
                    symmkey.decrypt(enc_peer_addr)?,
                ),
                Ok(_) => {
                    error!("{}", Error::UnexpectedMessage);
                    continue 'main;
                }
                Err(err) => {
                    error!("{}", err);
                    continue 'main;
                }
            }
        };
        info!("my addr: {}, peer addr: {}", my_addr, peer_addr);
        return Ok((my_addr, peer_addr));
    }
}
| 31.764151 | 88 | 0.482625 |
e91d3a32a50cd80f232c580c934189901e22bf16 | 44,775 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ascii;
use borrow::{Cow, Borrow};
use cmp::Ordering;
use error::Error;
use fmt::{self, Write};
use io;
use mem;
use memchr;
use ops;
use os::raw::c_char;
use ptr;
use rc::Rc;
use slice;
use str::{self, Utf8Error};
use sync::Arc;
use sys;
/// A type representing an owned, C-compatible, nul-terminated string with no nul bytes in the
/// middle.
///
/// This type serves the purpose of being able to safely generate a
/// C-compatible string from a Rust byte slice or vector. An instance of this
/// type is a static guarantee that the underlying bytes contain no interior 0
/// bytes ("nul characters") and that the final byte is 0 ("nul terminator").
///
/// `CString` is to [`CStr`] as [`String`] is to [`&str`]: the former
/// in each pair are owned strings; the latter are borrowed
/// references.
///
/// # Creating a `CString`
///
/// A `CString` is created from either a byte slice or a byte vector,
/// or anything that implements [`Into`]`<`[`Vec`]`<`[`u8`]`>>` (for
/// example, you can build a `CString` straight out of a [`String`] or
/// a [`&str`], since both implement that trait).
///
/// The [`new`] method will actually check that the provided `&[u8]`
/// does not have 0 bytes in the middle, and return an error if it
/// finds one.
///
/// # Extracting a raw pointer to the whole C string
///
/// `CString` implements a [`as_ptr`] method through the [`Deref`]
/// trait. This method will give you a `*const c_char` which you can
/// feed directly to extern functions that expect a nul-terminated
/// string, like C's `strdup()`.
///
/// # Extracting a slice of the whole C string
///
/// Alternatively, you can obtain a `&[`[`u8`]`]` slice from a
/// `CString` with the [`as_bytes`] method. Slices produced in this
/// way do *not* contain the trailing nul terminator. This is useful
/// when you will be calling an extern function that takes a `*const
/// u8` argument which is not necessarily nul-terminated, plus another
/// argument with the length of the string — like C's `strndup()`.
/// You can of course get the slice's length with its
/// [`len`][slice.len] method.
///
/// If you need a `&[`[`u8`]`]` slice *with* the nul terminator, you
/// can use [`as_bytes_with_nul`] instead.
///
/// Once you have the kind of slice you need (with or without a nul
/// terminator), you can call the slice's own
/// [`as_ptr`][slice.as_ptr] method to get a raw pointer to pass to
/// extern functions. See the documentation for that function for a
/// discussion on ensuring the lifetime of the raw pointer.
///
/// [`Into`]: ../convert/trait.Into.html
/// [`Vec`]: ../vec/struct.Vec.html
/// [`String`]: ../string/struct.String.html
/// [`&str`]: ../primitive.str.html
/// [`u8`]: ../primitive.u8.html
/// [`new`]: #method.new
/// [`as_bytes`]: #method.as_bytes
/// [`as_bytes_with_nul`]: #method.as_bytes_with_nul
/// [`as_ptr`]: #method.as_ptr
/// [slice.as_ptr]: ../primitive.slice.html#method.as_ptr
/// [slice.len]: ../primitive.slice.html#method.len
/// [`Deref`]: ../ops/trait.Deref.html
/// [`CStr`]: struct.CStr.html
///
/// # Examples
///
/// ```no_run
/// # fn main() {
/// use std::ffi::CString;
/// use std::os::raw::c_char;
///
/// extern {
/// fn my_printer(s: *const c_char);
/// }
///
/// // We are certain that our string doesn't have 0 bytes in the middle,
/// // so we can .unwrap()
/// let c_to_print = CString::new("Hello, world!").unwrap();
/// unsafe {
/// my_printer(c_to_print.as_ptr());
/// }
/// # }
/// ```
///
/// # Safety
///
/// `CString` is intended for working with traditional C-style strings
/// (a sequence of non-nul bytes terminated by a single nul byte); the
/// primary use case for these kinds of strings is interoperating with C-like
/// code. Often you will need to transfer ownership to/from that external
/// code. It is strongly recommended that you thoroughly read through the
/// documentation of `CString` before use, as improper ownership management
/// of `CString` instances can lead to invalid memory accesses, memory leaks,
/// and other memory errors.
#[derive(PartialEq, PartialOrd, Eq, Ord, Hash, Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct CString {
    // Invariant 1: the slice ends with a zero byte and has a length of at least one.
    // Invariant 2: the slice contains only one zero byte.
    // Improper usage of unsafe function can break Invariant 2, but not Invariant 1.
    //
    // The nul terminator is stored in-line, so `inner` is the complete
    // C representation (content bytes followed by the trailing 0).
    inner: Box<[u8]>,
}
/// Representation of a borrowed C string.
///
/// This type represents a borrowed reference to a nul-terminated
/// array of bytes. It can be constructed safely from a `&[`[`u8`]`]`
/// slice, or unsafely from a raw `*const c_char`. It can then be
/// converted to a Rust [`&str`] by performing UTF-8 validation, or
/// into an owned [`CString`].
///
/// `CStr` is to [`CString`] as [`&str`] is to [`String`]: the former
/// in each pair are borrowed references; the latter are owned
/// strings.
///
/// Note that this structure is **not** `repr(C)` and is not recommended to be
/// placed in the signatures of FFI functions. Instead, safe wrappers of FFI
/// functions may leverage the unsafe [`from_ptr`] constructor to provide a safe
/// interface to other consumers.
///
/// # Examples
///
/// Inspecting a foreign C string:
///
/// ```no_run
/// use std::ffi::CStr;
/// use std::os::raw::c_char;
///
/// extern { fn my_string() -> *const c_char; }
///
/// unsafe {
/// let slice = CStr::from_ptr(my_string());
/// println!("string buffer size without nul terminator: {}", slice.to_bytes().len());
/// }
/// ```
///
/// Passing a Rust-originating C string:
///
/// ```no_run
/// use std::ffi::{CString, CStr};
/// use std::os::raw::c_char;
///
/// fn work(data: &CStr) {
/// extern { fn work_with(data: *const c_char); }
///
/// unsafe { work_with(data.as_ptr()) }
/// }
///
/// let s = CString::new("data data data data").unwrap();
/// work(&s);
/// ```
///
/// Converting a foreign C string into a Rust [`String`]:
///
/// ```no_run
/// use std::ffi::CStr;
/// use std::os::raw::c_char;
///
/// extern { fn my_string() -> *const c_char; }
///
/// fn my_string_safe() -> String {
/// unsafe {
/// CStr::from_ptr(my_string()).to_string_lossy().into_owned()
/// }
/// }
///
/// println!("string: {}", my_string_safe());
/// ```
///
/// [`u8`]: ../primitive.u8.html
/// [`&str`]: ../primitive.str.html
/// [`String`]: ../string/struct.String.html
/// [`CString`]: struct.CString.html
/// [`from_ptr`]: #method.from_ptr
#[derive(Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct CStr {
    // FIXME: this should not be represented with a DST slice but rather with
    //        just a raw `c_char` along with some form of marker to make
    //        this an unsized type. Essentially `sizeof(&CStr)` should be the
    //        same as `sizeof(&c_char)` but `CStr` should be an unsized type.
    //
    // The slice includes the trailing nul byte.
    inner: [c_char]
}
/// An error indicating that an interior nul byte was found.
///
/// While Rust strings may contain nul bytes in the middle, C strings
/// can't, as that byte would effectively truncate the string.
///
/// This error is created by the [`new`][`CString::new`] method on
/// [`CString`]. See its documentation for more.
///
/// [`CString`]: struct.CString.html
/// [`CString::new`]: struct.CString.html#method.new
///
/// # Examples
///
/// ```
/// use std::ffi::{CString, NulError};
///
/// let _: NulError = CString::new(b"f\0oo".to_vec()).unwrap_err();
/// ```
#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
// Fields: byte position of the first interior nul, and the original
// byte vector so the caller can reclaim it.
pub struct NulError(usize, Vec<u8>);
/// An error indicating that a nul byte was not in the expected position.
///
/// The slice used to create a [`CStr`] must have one and only one nul
/// byte at the end of the slice.
///
/// This error is created by the
/// [`from_bytes_with_nul`][`CStr::from_bytes_with_nul`] method on
/// [`CStr`]. See its documentation for more.
///
/// [`CStr`]: struct.CStr.html
/// [`CStr::from_bytes_with_nul`]: struct.CStr.html#method.from_bytes_with_nul
///
/// # Examples
///
/// ```
/// use std::ffi::{CStr, FromBytesWithNulError};
///
/// let _: FromBytesWithNulError = CStr::from_bytes_with_nul(b"f\0oo").unwrap_err();
/// ```
#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "cstr_from_bytes", since = "1.10.0")]
pub struct FromBytesWithNulError {
    // Which of the two nul-placement violations occurred.
    kind: FromBytesWithNulErrorKind,
}
#[derive(Clone, PartialEq, Eq, Debug)]
enum FromBytesWithNulErrorKind {
    // A nul byte was found before the end; the payload is its position.
    InteriorNul(usize),
    // The slice did not end with a nul byte.
    NotNulTerminated,
}
impl FromBytesWithNulError {
    /// Error for a nul byte found at interior position `pos`.
    fn interior_nul(pos: usize) -> FromBytesWithNulError {
        let kind = FromBytesWithNulErrorKind::InteriorNul(pos);
        FromBytesWithNulError { kind }
    }

    /// Error for a byte slice that lacks a trailing nul terminator.
    fn not_nul_terminated() -> FromBytesWithNulError {
        let kind = FromBytesWithNulErrorKind::NotNulTerminated;
        FromBytesWithNulError { kind }
    }
}
/// An error indicating invalid UTF-8 when converting a [`CString`] into a [`String`].
///
/// `CString` is just a wrapper over a buffer of bytes with a nul
/// terminator; [`into_string`][`CString::into_string`] performs UTF-8
/// validation on those bytes and may return this error.
///
/// This `struct` is created by the
/// [`into_string`][`CString::into_string`] method on [`CString`]. See
/// its documentation for more.
///
/// [`String`]: ../string/struct.String.html
/// [`CString`]: struct.CString.html
/// [`CString::into_string`]: struct.CString.html#method.into_string
#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "cstring_into", since = "1.7.0")]
pub struct IntoStringError {
    // The original `CString`, handed back so the caller can recover it.
    inner: CString,
    // The underlying UTF-8 validation failure.
    error: Utf8Error,
}
impl CString {
/// Creates a new C-compatible string from a container of bytes.
///
/// This function will consume the provided data and use the
/// underlying bytes to construct a new string, ensuring that
/// there is a trailing 0 byte. This trailing 0 byte will be
/// appended by this function; the provided data should *not*
/// contain any 0 bytes in it.
///
/// # Examples
///
/// ```no_run
/// use std::ffi::CString;
/// use std::os::raw::c_char;
///
/// extern { fn puts(s: *const c_char); }
///
/// let to_print = CString::new("Hello!").unwrap();
/// unsafe {
/// puts(to_print.as_ptr());
/// }
/// ```
///
/// # Errors
///
/// This function will return an error if the supplied bytes contain an
/// internal 0 byte. The [`NulError`] returned will contain the bytes as well as
/// the position of the nul byte.
///
/// [`NulError`]: struct.NulError.html
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new<T: Into<Vec<u8>>>(t: T) -> Result<CString, NulError> {
Self::_new(t.into())
}
fn _new(bytes: Vec<u8>) -> Result<CString, NulError> {
match memchr::memchr(0, &bytes) {
Some(i) => Err(NulError(i, bytes)),
None => Ok(unsafe { CString::from_vec_unchecked(bytes) }),
}
}
/// Creates a C-compatible string by consuming a byte vector,
/// without checking for interior 0 bytes.
///
/// This method is equivalent to [`new`] except that no runtime assertion
/// is made that `v` contains no 0 bytes, and it requires an actual
/// byte vector, not anything that can be converted to one with Into.
///
/// [`new`]: #method.new
///
/// # Examples
///
/// ```
/// use std::ffi::CString;
///
/// let raw = b"foo".to_vec();
/// unsafe {
/// let c_string = CString::from_vec_unchecked(raw);
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_vec_unchecked(mut v: Vec<u8>) -> CString {
v.reserve_exact(1);
v.push(0);
CString { inner: v.into_boxed_slice() }
}
/// Retakes ownership of a `CString` that was transferred to C via [`into_raw`].
///
/// Additionally, the length of the string will be recalculated from the pointer.
///
/// # Safety
///
/// This should only ever be called with a pointer that was earlier
/// obtained by calling [`into_raw`] on a `CString`. Other usage (e.g. trying to take
/// ownership of a string that was allocated by foreign code) is likely to lead
/// to undefined behavior or allocator corruption.
///
/// > **Note:** If you need to borrow a string that was allocated by
/// > foreign code, use [`CStr`]. If you need to take ownership of
/// > a string that was allocated by foreign code, you will need to
/// > make your own provisions for freeing it appropriately, likely
/// > with the foreign code's API to do that.
///
/// [`into_raw`]: #method.into_raw
/// [`CStr`]: struct.CStr.html
///
/// # Examples
///
/// Create a `CString`, pass ownership to an `extern` function (via raw pointer), then retake
/// ownership with `from_raw`:
///
/// ```no_run
/// use std::ffi::CString;
/// use std::os::raw::c_char;
///
/// extern {
/// fn some_extern_function(s: *mut c_char);
/// }
///
/// let c_string = CString::new("Hello!").unwrap();
/// let raw = c_string.into_raw();
/// unsafe {
/// some_extern_function(raw);
/// let c_string = CString::from_raw(raw);
/// }
/// ```
#[stable(feature = "cstr_memory", since = "1.4.0")]
pub unsafe fn from_raw(ptr: *mut c_char) -> CString {
let len = sys::strlen(ptr) + 1; // Including the NUL byte
let slice = slice::from_raw_parts_mut(ptr, len as usize);
CString { inner: Box::from_raw(slice as *mut [c_char] as *mut [u8]) }
}
/// Consumes the `CString` and transfers ownership of the string to a C caller.
///
/// The pointer which this function returns must be returned to Rust and reconstituted using
/// [`from_raw`] to be properly deallocated. Specifically, one
/// should *not* use the standard C `free()` function to deallocate
/// this string.
///
/// Failure to call [`from_raw`] will lead to a memory leak.
///
/// [`from_raw`]: #method.from_raw
///
/// # Examples
///
/// ```
/// use std::ffi::CString;
///
/// let c_string = CString::new("foo").unwrap();
///
/// let ptr = c_string.into_raw();
///
/// unsafe {
/// assert_eq!(b'f', *ptr as u8);
/// assert_eq!(b'o', *ptr.offset(1) as u8);
/// assert_eq!(b'o', *ptr.offset(2) as u8);
/// assert_eq!(b'\0', *ptr.offset(3) as u8);
///
/// // retake pointer to free memory
/// let _ = CString::from_raw(ptr);
/// }
/// ```
#[inline]
#[stable(feature = "cstr_memory", since = "1.4.0")]
pub fn into_raw(self) -> *mut c_char {
Box::into_raw(self.into_inner()) as *mut c_char
}
/// Converts the `CString` into a [`String`] if it contains valid UTF-8 data.
///
/// On failure, ownership of the original `CString` is returned.
///
/// [`String`]: ../string/struct.String.html
///
/// # Examples
///
/// ```
/// use std::ffi::CString;
///
/// let valid_utf8 = vec![b'f', b'o', b'o'];
/// let cstring = CString::new(valid_utf8).unwrap();
/// assert_eq!(cstring.into_string().unwrap(), "foo");
///
/// let invalid_utf8 = vec![b'f', 0xff, b'o', b'o'];
/// let cstring = CString::new(invalid_utf8).unwrap();
/// let err = cstring.into_string().err().unwrap();
/// assert_eq!(err.utf8_error().valid_up_to(), 1);
/// ```
#[stable(feature = "cstring_into", since = "1.7.0")]
pub fn into_string(self) -> Result<String, IntoStringError> {
String::from_utf8(self.into_bytes())
.map_err(|e| IntoStringError {
error: e.utf8_error(),
inner: unsafe { CString::from_vec_unchecked(e.into_bytes()) },
})
}
/// Consumes the `CString` and returns the underlying byte buffer.
///
/// The returned buffer does **not** contain the trailing nul
/// terminator, and it is guaranteed to not have any interior nul
/// bytes.
///
/// # Examples
///
/// ```
/// use std::ffi::CString;
///
/// let c_string = CString::new("foo").unwrap();
/// let bytes = c_string.into_bytes();
/// assert_eq!(bytes, vec![b'f', b'o', b'o']);
/// ```
#[stable(feature = "cstring_into", since = "1.7.0")]
pub fn into_bytes(self) -> Vec<u8> {
let mut vec = self.into_inner().into_vec();
let _nul = vec.pop();
debug_assert_eq!(_nul, Some(0u8));
vec
}
/// Equivalent to the [`into_bytes`] function except that the returned vector
/// includes the trailing nul terminator.
///
/// [`into_bytes`]: #method.into_bytes
///
/// # Examples
///
/// ```
/// use std::ffi::CString;
///
/// let c_string = CString::new("foo").unwrap();
/// let bytes = c_string.into_bytes_with_nul();
/// assert_eq!(bytes, vec![b'f', b'o', b'o', b'\0']);
/// ```
#[stable(feature = "cstring_into", since = "1.7.0")]
pub fn into_bytes_with_nul(self) -> Vec<u8> {
self.into_inner().into_vec()
}
/// Returns the contents of this `CString` as a slice of bytes.
///
/// The returned slice does **not** contain the trailing nul
/// terminator, and it is guaranteed to not have any interior nul
/// bytes. If you need the nul terminator, use
/// [`as_bytes_with_nul`] instead.
///
/// [`as_bytes_with_nul`]: #method.as_bytes_with_nul
///
/// # Examples
///
/// ```
/// use std::ffi::CString;
///
/// let c_string = CString::new("foo").unwrap();
/// let bytes = c_string.as_bytes();
/// assert_eq!(bytes, &[b'f', b'o', b'o']);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_bytes(&self) -> &[u8] {
&self.inner[..self.inner.len() - 1]
}
/// Equivalent to the [`as_bytes`] function except that the returned slice
/// includes the trailing nul terminator.
///
/// [`as_bytes`]: #method.as_bytes
///
/// # Examples
///
/// ```
/// use std::ffi::CString;
///
/// let c_string = CString::new("foo").unwrap();
/// let bytes = c_string.as_bytes_with_nul();
/// assert_eq!(bytes, &[b'f', b'o', b'o', b'\0']);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_bytes_with_nul(&self) -> &[u8] {
&self.inner
}
/// Extracts a [`CStr`] slice containing the entire string.
///
/// [`CStr`]: struct.CStr.html
///
/// # Examples
///
/// ```
/// use std::ffi::{CString, CStr};
///
/// let c_string = CString::new(b"foo".to_vec()).unwrap();
/// let c_str = c_string.as_c_str();
/// assert_eq!(c_str, CStr::from_bytes_with_nul(b"foo\0").unwrap());
/// ```
#[inline]
#[stable(feature = "as_c_str", since = "1.20.0")]
pub fn as_c_str(&self) -> &CStr {
&*self
}
/// Converts this `CString` into a boxed [`CStr`].
///
/// [`CStr`]: struct.CStr.html
///
/// # Examples
///
/// ```
/// use std::ffi::{CString, CStr};
///
/// let c_string = CString::new(b"foo".to_vec()).unwrap();
/// let boxed = c_string.into_boxed_c_str();
/// assert_eq!(&*boxed, CStr::from_bytes_with_nul(b"foo\0").unwrap());
/// ```
#[stable(feature = "into_boxed_c_str", since = "1.20.0")]
pub fn into_boxed_c_str(self) -> Box<CStr> {
unsafe { Box::from_raw(Box::into_raw(self.into_inner()) as *mut CStr) }
}
    /// Bypass "move out of struct which implements [`Drop`] trait" restriction.
    ///
    /// [`Drop`]: ../ops/trait.Drop.html
    fn into_inner(self) -> Box<[u8]> {
        unsafe {
            // Move the buffer out bitwise, then suppress the destructor with
            // `mem::forget` so `Drop::drop` never zeroes the first byte.
            let result = ptr::read(&self.inner);
            mem::forget(self);
            result
        }
    }
}
// Turns this `CString` into an empty string to prevent
// memory unsafe code from working by accident. Inline
// to prevent LLVM from optimizing it away in debug builds.
#[stable(feature = "cstring_drop", since = "1.13.0")]
impl Drop for CString {
    #[inline]
    fn drop(&mut self) {
        // Index 0 always exists: the buffer holds at least the nul terminator.
        unsafe { *self.inner.get_unchecked_mut(0) = 0; }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Deref for CString {
    type Target = CStr;
    #[inline]
    fn deref(&self) -> &CStr {
        // The `CString` invariants (nul-terminated, no interior nuls) are
        // exactly what `from_bytes_with_nul_unchecked` requires.
        unsafe { CStr::from_bytes_with_nul_unchecked(self.as_bytes_with_nul()) }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for CString {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[stable(feature = "cstring_into", since = "1.7.0")]
impl From<CString> for Vec<u8> {
#[inline]
fn from(s: CString) -> Vec<u8> {
s.into_bytes()
}
}
#[stable(feature = "cstr_debug", since = "1.3.0")]
impl fmt::Debug for CStr {
    // Renders as a quoted string with non-printable bytes ASCII-escaped.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("\"")?;
        for &b in self.to_bytes() {
            for escaped in ascii::escape_default(b) {
                f.write_char(escaped as char)?;
            }
        }
        f.write_str("\"")
    }
}
#[stable(feature = "cstr_default", since = "1.10.0")]
impl<'a> Default for &'a CStr {
    fn default() -> &'a CStr {
        // A lone nul terminator is the empty C string; the literal has
        // 'static lifetime, which coerces to any 'a.
        unsafe { CStr::from_bytes_with_nul_unchecked(b"\0") }
    }
}
#[stable(feature = "cstr_default", since = "1.10.0")]
impl Default for CString {
    /// Creates an empty `CString`.
    fn default() -> CString {
        let empty: &CStr = Default::default();
        empty.to_owned()
    }
}
#[stable(feature = "cstr_borrow", since = "1.3.0")]
impl Borrow<CStr> for CString {
    // Deref coercion turns `&CString` into `&CStr`.
    #[inline]
    fn borrow(&self) -> &CStr {
        self
    }
}
#[stable(feature = "box_from_c_str", since = "1.17.0")]
impl<'a> From<&'a CStr> for Box<CStr> {
    fn from(s: &'a CStr) -> Box<CStr> {
        // Copy all bytes (including the nul) into a fresh allocation, then
        // re-tag the fat pointer as `CStr`; the layouts are identical.
        let bytes: Box<[u8]> = Box::from(s.to_bytes_with_nul());
        let raw = Box::into_raw(bytes) as *mut CStr;
        unsafe { Box::from_raw(raw) }
    }
}
#[stable(feature = "c_string_from_box", since = "1.18.0")]
impl From<Box<CStr>> for CString {
#[inline]
fn from(s: Box<CStr>) -> CString {
s.into_c_string()
}
}
#[stable(feature = "box_from_c_string", since = "1.20.0")]
impl From<CString> for Box<CStr> {
#[inline]
fn from(s: CString) -> Box<CStr> {
s.into_boxed_c_str()
}
}
#[stable(feature = "shared_from_slice2", since = "1.24.0")]
impl From<CString> for Arc<CStr> {
    #[inline]
    fn from(s: CString) -> Arc<CStr> {
        // Move the buffer into an `Arc<[u8]>`, then re-tag the pointer as
        // `CStr`; the layouts are identical.
        let shared: Arc<[u8]> = Arc::from(s.into_inner());
        let raw = Arc::into_raw(shared) as *const CStr;
        unsafe { Arc::from_raw(raw) }
    }
}
#[stable(feature = "shared_from_slice2", since = "1.24.0")]
impl<'a> From<&'a CStr> for Arc<CStr> {
    #[inline]
    fn from(s: &CStr) -> Arc<CStr> {
        // Copy the bytes (with the nul) into a shared allocation, then
        // re-tag the pointer as `CStr`.
        let shared: Arc<[u8]> = Arc::from(s.to_bytes_with_nul());
        let raw = Arc::into_raw(shared) as *const CStr;
        unsafe { Arc::from_raw(raw) }
    }
}
#[stable(feature = "shared_from_slice2", since = "1.24.0")]
impl From<CString> for Rc<CStr> {
    #[inline]
    fn from(s: CString) -> Rc<CStr> {
        // Move the buffer into an `Rc<[u8]>`, then re-tag the pointer as
        // `CStr`; the layouts are identical.
        let shared: Rc<[u8]> = Rc::from(s.into_inner());
        let raw = Rc::into_raw(shared) as *const CStr;
        unsafe { Rc::from_raw(raw) }
    }
}
#[stable(feature = "shared_from_slice2", since = "1.24.0")]
impl<'a> From<&'a CStr> for Rc<CStr> {
    #[inline]
    fn from(s: &CStr) -> Rc<CStr> {
        // Copy the bytes (with the nul) into a shared allocation, then
        // re-tag the pointer as `CStr`.
        let shared: Rc<[u8]> = Rc::from(s.to_bytes_with_nul());
        let raw = Rc::into_raw(shared) as *const CStr;
        unsafe { Rc::from_raw(raw) }
    }
}
#[stable(feature = "default_box_extra", since = "1.17.0")]
impl Default for Box<CStr> {
    // The default is the empty C string: a single nul byte.
    fn default() -> Box<CStr> {
        let bytes: Box<[u8]> = Box::from([0u8]);
        let raw = Box::into_raw(bytes) as *mut CStr;
        unsafe { Box::from_raw(raw) }
    }
}
impl NulError {
    /// Returns the index of the nul byte in the input that caused
    /// [`CString::new`] to fail.
    ///
    /// [`CString::new`]: struct.CString.html#method.new
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ffi::CString;
    ///
    /// let nul_error = CString::new("foo\0bar").unwrap_err();
    /// assert_eq!(nul_error.nul_position(), 3);
    ///
    /// let nul_error = CString::new("foo bar\0").unwrap_err();
    /// assert_eq!(nul_error.nul_position(), 7);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn nul_position(&self) -> usize {
        self.0
    }

    /// Consumes this error, returning the bytes that were being converted
    /// when the interior nul byte was found.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ffi::CString;
    ///
    /// let nul_error = CString::new("foo\0bar").unwrap_err();
    /// assert_eq!(nul_error.into_vec(), b"foo\0bar");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn into_vec(self) -> Vec<u8> {
        self.1
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Error for NulError {
    fn description(&self) -> &str {
        "nul byte found in data"
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for NulError {
    // Includes the byte offset of the offending nul.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "nul byte found in provided data at position: {}", self.0)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl From<NulError> for io::Error {
fn from(_: NulError) -> io::Error {
io::Error::new(io::ErrorKind::InvalidInput,
"data provided contains a nul byte")
}
}
#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")]
impl Error for FromBytesWithNulError {
    fn description(&self) -> &str {
        match self.kind {
            FromBytesWithNulErrorKind::NotNulTerminated =>
                "data provided is not nul terminated",
            FromBytesWithNulErrorKind::InteriorNul(..) =>
                "data provided contains an interior nul byte",
        }
    }
}
#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")]
impl fmt::Display for FromBytesWithNulError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Base message, plus the offset for the interior-nul case.
        f.write_str(self.description())?;
        match self.kind {
            FromBytesWithNulErrorKind::InteriorNul(pos) => write!(f, " at byte pos {}", pos),
            FromBytesWithNulErrorKind::NotNulTerminated => Ok(()),
        }
    }
}
impl IntoStringError {
    /// Consumes this error, returning the original [`CString`] which generated
    /// the error.
    ///
    /// [`CString`]: struct.CString.html
    #[stable(feature = "cstring_into", since = "1.7.0")]
    pub fn into_cstring(self) -> CString {
        self.inner
    }

    /// Access the underlying UTF-8 error that was the cause of this error.
    #[stable(feature = "cstring_into", since = "1.7.0")]
    pub fn utf8_error(&self) -> Utf8Error {
        self.error
    }
}
#[stable(feature = "cstring_into", since = "1.7.0")]
impl Error for IntoStringError {
    fn description(&self) -> &str {
        "C string contained non-utf8 bytes"
    }

    // Exposes the underlying UTF-8 error as the cause.
    fn cause(&self) -> Option<&Error> {
        Some(&self.error)
    }
}
#[stable(feature = "cstring_into", since = "1.7.0")]
impl fmt::Display for IntoStringError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}
impl CStr {
    /// Wraps a raw C string with a safe C string wrapper.
    ///
    /// This function will wrap the provided `ptr` with a `CStr` wrapper, which
    /// allows inspection and interoperation of non-owned C strings. This method
    /// is unsafe for a number of reasons:
    ///
    /// * There is no guarantee to the validity of `ptr`.
    /// * The returned lifetime is not guaranteed to be the actual lifetime of
    ///   `ptr`.
    /// * There is no guarantee that the memory pointed to by `ptr` contains a
    ///   valid nul terminator byte at the end of the string.
    ///
    /// > **Note**: This operation is intended to be a 0-cost cast but it is
    /// > currently implemented with an up-front calculation of the length of
    /// > the string. This is not guaranteed to always be the case.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() {
    /// use std::ffi::CStr;
    /// use std::os::raw::c_char;
    ///
    /// extern {
    ///     fn my_string() -> *const c_char;
    /// }
    ///
    /// unsafe {
    ///     let slice = CStr::from_ptr(my_string());
    ///     println!("string returned: {}", slice.to_str().unwrap());
    /// }
    /// # }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub unsafe fn from_ptr<'a>(ptr: *const c_char) -> &'a CStr {
        // `strlen` excludes the terminator; +1 re-includes it in the slice.
        let len = sys::strlen(ptr);
        let ptr = ptr as *const u8;
        CStr::from_bytes_with_nul_unchecked(slice::from_raw_parts(ptr, len as usize + 1))
    }

    /// Creates a C string wrapper from a byte slice.
    ///
    /// This function will cast the provided `bytes` to a `CStr`
    /// wrapper after ensuring that the byte slice is nul-terminated
    /// and does not contain any interior nul bytes.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ffi::CStr;
    ///
    /// let cstr = CStr::from_bytes_with_nul(b"hello\0");
    /// assert!(cstr.is_ok());
    /// ```
    ///
    /// Creating a `CStr` without a trailing nul terminator is an error:
    ///
    /// ```
    /// use std::ffi::CStr;
    ///
    /// let c_str = CStr::from_bytes_with_nul(b"hello");
    /// assert!(c_str.is_err());
    /// ```
    ///
    /// Creating a `CStr` with an interior nul byte is an error:
    ///
    /// ```
    /// use std::ffi::CStr;
    ///
    /// let c_str = CStr::from_bytes_with_nul(b"he\0llo\0");
    /// assert!(c_str.is_err());
    /// ```
    #[stable(feature = "cstr_from_bytes", since = "1.10.0")]
    pub fn from_bytes_with_nul(bytes: &[u8])
                               -> Result<&CStr, FromBytesWithNulError> {
        // The first nul byte must be the last byte of the slice; a nul
        // anywhere else is an interior nul, and no nul at all means the
        // slice is not terminated.
        let nul_pos = memchr::memchr(0, bytes);
        if let Some(nul_pos) = nul_pos {
            if nul_pos + 1 != bytes.len() {
                return Err(FromBytesWithNulError::interior_nul(nul_pos));
            }
            Ok(unsafe { CStr::from_bytes_with_nul_unchecked(bytes) })
        } else {
            Err(FromBytesWithNulError::not_nul_terminated())
        }
    }

    /// Unsafely creates a C string wrapper from a byte slice.
    ///
    /// This function will cast the provided `bytes` to a `CStr` wrapper without
    /// performing any sanity checks. The provided slice **must** be nul-terminated
    /// and not contain any interior nul bytes.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ffi::{CStr, CString};
    ///
    /// unsafe {
    ///     let cstring = CString::new("hello").unwrap();
    ///     let cstr = CStr::from_bytes_with_nul_unchecked(cstring.to_bytes_with_nul());
    ///     assert_eq!(cstr, &*cstring);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "cstr_from_bytes", since = "1.10.0")]
    pub unsafe fn from_bytes_with_nul_unchecked(bytes: &[u8]) -> &CStr {
        // Pure pointer re-tag: `CStr` is a transparent wrapper over `[u8]`.
        &*(bytes as *const [u8] as *const CStr)
    }

    /// Returns the inner pointer to this C string.
    ///
    /// The returned pointer will be valid for as long as `self` is, and points
    /// to a contiguous region of memory terminated with a 0 byte to represent
    /// the end of the string.
    ///
    /// **WARNING**
    ///
    /// It is your responsibility to make sure that the underlying memory is not
    /// freed too early. For example, the following code will cause undefined
    /// behavior when `ptr` is used inside the `unsafe` block:
    ///
    /// ```no_run
    /// use std::ffi::{CString};
    ///
    /// let ptr = CString::new("Hello").unwrap().as_ptr();
    /// unsafe {
    ///     // `ptr` is dangling
    ///     *ptr;
    /// }
    /// ```
    ///
    /// This happens because the pointer returned by `as_ptr` does not carry any
    /// lifetime information and the [`CString`] is deallocated immediately after
    /// the `CString::new("Hello").unwrap().as_ptr()` expression is evaluated.
    /// To fix the problem, bind the `CString` to a local variable:
    ///
    /// ```no_run
    /// use std::ffi::{CString};
    ///
    /// let hello = CString::new("Hello").unwrap();
    /// let ptr = hello.as_ptr();
    /// unsafe {
    ///     // `ptr` is valid because `hello` is in scope
    ///     *ptr;
    /// }
    /// ```
    ///
    /// This way, the lifetime of the `CString` in `hello` encompasses
    /// the lifetime of `ptr` and the `unsafe` block.
    ///
    /// [`CString`]: struct.CString.html
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn as_ptr(&self) -> *const c_char {
        self.inner.as_ptr()
    }

    /// Converts this C string to a byte slice.
    ///
    /// The returned slice will **not** contain the trailing nul terminator that this C
    /// string has.
    ///
    /// > **Note**: This method is currently implemented as a constant-time
    /// > cast, but it is planned to alter its definition in the future to
    /// > perform the length calculation whenever this method is called.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ffi::CStr;
    ///
    /// let c_str = CStr::from_bytes_with_nul(b"foo\0").unwrap();
    /// assert_eq!(c_str.to_bytes(), b"foo");
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn to_bytes(&self) -> &[u8] {
        let bytes = self.to_bytes_with_nul();
        &bytes[..bytes.len() - 1]
    }

    /// Converts this C string to a byte slice containing the trailing 0 byte.
    ///
    /// This function is the equivalent of [`to_bytes`] except that it will retain
    /// the trailing nul terminator instead of chopping it off.
    ///
    /// > **Note**: This method is currently implemented as a 0-cost cast, but
    /// > it is planned to alter its definition in the future to perform the
    /// > length calculation whenever this method is called.
    ///
    /// [`to_bytes`]: #method.to_bytes
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ffi::CStr;
    ///
    /// let c_str = CStr::from_bytes_with_nul(b"foo\0").unwrap();
    /// assert_eq!(c_str.to_bytes_with_nul(), b"foo\0");
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn to_bytes_with_nul(&self) -> &[u8] {
        // Re-tag `[c_char]` as `[u8]`; both are byte arrays of equal layout.
        unsafe { &*(&self.inner as *const [c_char] as *const [u8]) }
    }

    /// Yields a [`&str`] slice if the `CStr` contains valid UTF-8.
    ///
    /// If the contents of the `CStr` are valid UTF-8 data, this
    /// function will return the corresponding [`&str`] slice. Otherwise,
    /// it will return an error with details of where UTF-8 validation failed.
    ///
    /// > **Note**: This method is currently implemented to check for validity
    /// > after a constant-time cast, but it is planned to alter its definition
    /// > in the future to perform the length calculation in addition to the
    /// > UTF-8 check whenever this method is called.
    ///
    /// [`&str`]: ../primitive.str.html
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ffi::CStr;
    ///
    /// let c_str = CStr::from_bytes_with_nul(b"foo\0").unwrap();
    /// assert_eq!(c_str.to_str(), Ok("foo"));
    /// ```
    #[stable(feature = "cstr_to_str", since = "1.4.0")]
    pub fn to_str(&self) -> Result<&str, str::Utf8Error> {
        // NB: When CStr is changed to perform the length check in .to_bytes()
        // instead of in from_ptr(), it may be worth considering if this should
        // be rewritten to do the UTF-8 check inline with the length calculation
        // instead of doing it afterwards.
        str::from_utf8(self.to_bytes())
    }

    /// Converts a `CStr` into a [`Cow`]`<`[`str`]`>`.
    ///
    /// If the contents of the `CStr` are valid UTF-8 data, this
    /// function will return a [`Cow`]`::`[`Borrowed`]`(`[`&str`]`)`
    /// with the corresponding [`&str`] slice. Otherwise, it will
    /// replace any invalid UTF-8 sequences with `U+FFFD REPLACEMENT
    /// CHARACTER` and return a [`Cow`]`::`[`Owned`]`(`[`String`]`)`
    /// with the result.
    ///
    /// > **Note**: This method is currently implemented to check for validity
    /// > after a constant-time cast, but it is planned to alter its definition
    /// > in the future to perform the length calculation in addition to the
    /// > UTF-8 check whenever this method is called.
    ///
    /// [`Cow`]: ../borrow/enum.Cow.html
    /// [`Borrowed`]: ../borrow/enum.Cow.html#variant.Borrowed
    /// [`str`]: ../primitive.str.html
    /// [`String`]: ../string/struct.String.html
    ///
    /// # Examples
    ///
    /// Calling `to_string_lossy` on a `CStr` containing valid UTF-8:
    ///
    /// ```
    /// use std::borrow::Cow;
    /// use std::ffi::CStr;
    ///
    /// let c_str = CStr::from_bytes_with_nul(b"Hello World\0").unwrap();
    /// assert_eq!(c_str.to_string_lossy(), Cow::Borrowed("Hello World"));
    /// ```
    ///
    /// Calling `to_string_lossy` on a `CStr` containing invalid UTF-8:
    ///
    /// ```
    /// use std::borrow::Cow;
    /// use std::ffi::CStr;
    ///
    /// let c_str = CStr::from_bytes_with_nul(b"Hello \xF0\x90\x80World\0").unwrap();
    /// assert_eq!(
    ///     c_str.to_string_lossy(),
    ///     Cow::Owned(String::from("Hello �World")) as Cow<str>
    /// );
    /// ```
    #[stable(feature = "cstr_to_str", since = "1.4.0")]
    pub fn to_string_lossy(&self) -> Cow<str> {
        String::from_utf8_lossy(self.to_bytes())
    }

    /// Converts a [`Box`]`<CStr>` into a [`CString`] without copying or allocating.
    ///
    /// [`Box`]: ../boxed/struct.Box.html
    /// [`CString`]: struct.CString.html
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ffi::CString;
    ///
    /// let c_string = CString::new(b"foo".to_vec()).unwrap();
    /// let boxed = c_string.into_boxed_c_str();
    /// assert_eq!(boxed.into_c_string(), CString::new("foo").unwrap());
    /// ```
    #[stable(feature = "into_boxed_c_str", since = "1.20.0")]
    pub fn into_c_string(self: Box<CStr>) -> CString {
        // Reuses the boxed allocation; only the pointer type changes.
        let raw = Box::into_raw(self) as *mut [u8];
        CString { inner: unsafe { Box::from_raw(raw) } }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl PartialEq for CStr {
    // Equality compares byte contents, excluding the nul terminator.
    fn eq(&self, other: &CStr) -> bool {
        self.to_bytes() == other.to_bytes()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
// Byte-wise equality on `[u8]` is a total equivalence relation, so `Eq` holds.
impl Eq for CStr {}
#[stable(feature = "rust1", since = "1.0.0")]
impl PartialOrd for CStr {
    // Lexicographic order over the bytes, excluding the nul terminator.
    fn partial_cmp(&self, other: &CStr) -> Option<Ordering> {
        self.to_bytes().partial_cmp(&other.to_bytes())
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Ord for CStr {
    // Lexicographic order over the bytes, excluding the nul terminator.
    fn cmp(&self, other: &CStr) -> Ordering {
        self.to_bytes().cmp(&other.to_bytes())
    }
}
#[stable(feature = "cstr_borrow", since = "1.3.0")]
impl ToOwned for CStr {
    type Owned = CString;

    // Copies the bytes including the nul into a freshly-owned buffer.
    fn to_owned(&self) -> CString {
        let owned_bytes = self.to_bytes_with_nul().into();
        CString { inner: owned_bytes }
    }
}
#[stable(feature = "cstring_asref", since = "1.7.0")]
impl<'a> From<&'a CStr> for CString {
fn from(s: &'a CStr) -> CString {
s.to_owned()
}
}
#[stable(feature = "cstring_asref", since = "1.7.0")]
impl ops::Index<ops::RangeFull> for CString {
    type Output = CStr;

    // `s[..]` yields the whole string as a `&CStr` via deref coercion.
    #[inline]
    fn index(&self, _index: ops::RangeFull) -> &CStr {
        self
    }
}
#[stable(feature = "cstring_asref", since = "1.7.0")]
impl AsRef<CStr> for CStr {
    // Identity conversion.
    #[inline]
    fn as_ref(&self) -> &CStr {
        self
    }
}
#[stable(feature = "cstring_asref", since = "1.7.0")]
impl AsRef<CStr> for CString {
    // Deref coercion turns `&CString` into `&CStr`.
    #[inline]
    fn as_ref(&self) -> &CStr {
        self
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use os::raw::c_char;
    use borrow::Cow::{Borrowed, Owned};
    use hash::{Hash, Hasher};
    use collections::hash_map::DefaultHasher;
    use rc::Rc;
    use sync::Arc;

    // Round-trip from a raw nul-terminated pointer back into a `CStr`.
    #[test]
    fn c_to_rust() {
        let data = b"123\0";
        let ptr = data.as_ptr() as *const c_char;
        unsafe {
            assert_eq!(CStr::from_ptr(ptr).to_bytes(), b"123");
            assert_eq!(CStr::from_ptr(ptr).to_bytes_with_nul(), b"123\0");
        }
    }

    #[test]
    fn simple() {
        let s = CString::new("1234").unwrap();
        assert_eq!(s.as_bytes(), b"1234");
        assert_eq!(s.as_bytes_with_nul(), b"1234\0");
    }

    // `CString::new` must reject any input containing a nul byte.
    #[test]
    fn build_with_zero1() {
        assert!(CString::new(&b"\0"[..]).is_err());
    }

    #[test]
    fn build_with_zero2() {
        assert!(CString::new(vec![0]).is_err());
    }

    // ...but the unchecked constructor accepts it verbatim.
    #[test]
    fn build_with_zero3() {
        unsafe {
            let s = CString::from_vec_unchecked(vec![0]);
            assert_eq!(s.as_bytes(), b"\0");
        }
    }

    // Debug formatting must escape non-printable and non-ASCII bytes.
    #[test]
    fn formatted() {
        let s = CString::new(&b"abc\x01\x02\n\xE2\x80\xA6\xFF"[..]).unwrap();
        assert_eq!(format!("{:?}", s), r#""abc\x01\x02\n\xe2\x80\xa6\xff""#);
    }

    #[test]
    fn borrowed() {
        unsafe {
            let s = CStr::from_ptr(b"12\0".as_ptr() as *const _);
            assert_eq!(s.to_bytes(), b"12");
            assert_eq!(s.to_bytes_with_nul(), b"12\0");
        }
    }

    // Valid UTF-8 borrows; invalid UTF-8 errors in `to_str` and is replaced
    // with U+FFFD (an owned string) in `to_string_lossy`.
    #[test]
    fn to_str() {
        let data = b"123\xE2\x80\xA6\0";
        let ptr = data.as_ptr() as *const c_char;
        unsafe {
            assert_eq!(CStr::from_ptr(ptr).to_str(), Ok("123…"));
            assert_eq!(CStr::from_ptr(ptr).to_string_lossy(), Borrowed("123…"));
        }
        let data = b"123\xE2\0";
        let ptr = data.as_ptr() as *const c_char;
        unsafe {
            assert!(CStr::from_ptr(ptr).to_str().is_err());
            assert_eq!(CStr::from_ptr(ptr).to_string_lossy(), Owned::<str>(format!("123\u{FFFD}")));
        }
    }

    #[test]
    fn to_owned() {
        let data = b"123\0";
        let ptr = data.as_ptr() as *const c_char;
        let owned = unsafe { CStr::from_ptr(ptr).to_owned() };
        assert_eq!(owned.as_bytes_with_nul(), data);
    }

    // `CStr` and its owned `CString` counterpart must hash identically.
    #[test]
    fn equal_hash() {
        let data = b"123\xE2\xFA\xA6\0";
        let ptr = data.as_ptr() as *const c_char;
        let cstr: &'static CStr = unsafe { CStr::from_ptr(ptr) };
        let mut s = DefaultHasher::new();
        cstr.hash(&mut s);
        let cstr_hash = s.finish();
        let mut s = DefaultHasher::new();
        CString::new(&data[..data.len() - 1]).unwrap().hash(&mut s);
        let cstring_hash = s.finish();
        assert_eq!(cstr_hash, cstring_hash);
    }

    #[test]
    fn from_bytes_with_nul() {
        let data = b"123\0";
        let cstr = CStr::from_bytes_with_nul(data);
        assert_eq!(cstr.map(CStr::to_bytes), Ok(&b"123"[..]));
        let cstr = CStr::from_bytes_with_nul(data);
        assert_eq!(cstr.map(CStr::to_bytes_with_nul), Ok(&b"123\0"[..]));
        unsafe {
            let cstr = CStr::from_bytes_with_nul(data);
            let cstr_unchecked = CStr::from_bytes_with_nul_unchecked(data);
            assert_eq!(cstr, Ok(cstr_unchecked));
        }
    }

    #[test]
    fn from_bytes_with_nul_unterminated() {
        let data = b"123";
        let cstr = CStr::from_bytes_with_nul(data);
        assert!(cstr.is_err());
    }

    #[test]
    fn from_bytes_with_nul_interior() {
        let data = b"1\023\0";
        let cstr = CStr::from_bytes_with_nul(data);
        assert!(cstr.is_err());
    }

    // Box<CStr> round-trips: &CStr -> Box<CStr> -> CString -> back.
    #[test]
    fn into_boxed() {
        let orig: &[u8] = b"Hello, world!\0";
        let cstr = CStr::from_bytes_with_nul(orig).unwrap();
        let boxed: Box<CStr> = Box::from(cstr);
        let cstring = cstr.to_owned().into_boxed_c_str().into_c_string();
        assert_eq!(cstr, &*boxed);
        assert_eq!(&*boxed, &*cstring);
        assert_eq!(&*cstring, cstr);
    }

    #[test]
    fn boxed_default() {
        let boxed = <Box<CStr>>::default();
        assert_eq!(boxed.to_bytes_with_nul(), &[0]);
    }

    // Shared-pointer conversions preserve contents from both borrowed and
    // owned sources.
    #[test]
    fn into_rc() {
        let orig: &[u8] = b"Hello, world!\0";
        let cstr = CStr::from_bytes_with_nul(orig).unwrap();
        let rc: Rc<CStr> = Rc::from(cstr);
        let arc: Arc<CStr> = Arc::from(cstr);
        assert_eq!(&*rc, cstr);
        assert_eq!(&*arc, cstr);
        let rc2: Rc<CStr> = Rc::from(cstr.to_owned());
        let arc2: Arc<CStr> = Arc::from(cstr.to_owned());
        assert_eq!(&*rc2, cstr);
        assert_eq!(&*arc2, cstr);
    }
}
| 32.027897 | 100 | 0.577376 |
f4f8224103425eed5e38c274dcae96295d585af8 | 18,312 | use descriptor::*;
use descriptorx::*;
use code_writer::*;
use rustproto;
use super::enums::*;
use super::rust_types_values::*;
use super::field::*;
/// Message info for codegen
pub struct MessageGen<'a> {
    // Descriptor of the message being generated, with its scope.
    message: &'a MessageWithScope<'a>,
    // Root scope used to resolve types referenced by fields.
    root_scope: &'a RootScope<'a>,
    // Rust identifier generated for this message type.
    type_name: String,
    // Per-field code generators, parsed once in `new`.
    pub fields: Vec<FieldGen<'a>>,
    // True when the .proto file requested `optimize_for = LITE_RUNTIME`
    // (no reflection/descriptors are emitted in that mode).
    pub lite_runtime: bool,
}
impl<'a> MessageGen<'a> {
pub fn new(message: &'a MessageWithScope<'a>, root_scope: &'a RootScope<'a>) -> MessageGen<'a> {
let fields: Vec<_> = message
.fields()
.into_iter()
.map(|field| FieldGen::parse(field, root_scope))
.collect();
MessageGen {
message: message,
root_scope: root_scope,
type_name: message.rust_name(),
fields: fields,
lite_runtime: message
.get_file_descriptor()
.get_options()
.get_optimize_for() ==
FileOptions_OptimizeMode::LITE_RUNTIME,
}
}
fn expose_oneof(&self) -> bool {
let options = self.message.get_scope().get_file_descriptor().get_options();
rustproto::exts::expose_oneof_all
.get(options)
.unwrap_or(false)
}
fn oneofs(&'a self) -> Vec<OneofGen<'a>> {
self.message
.oneofs()
.into_iter()
.map(|oneof| OneofGen::parse(self, oneof))
.collect()
}
fn required_fields(&'a self) -> Vec<&'a FieldGen> {
self.fields
.iter()
.filter(|f| match f.kind {
FieldKind::Singular(ref singular) => singular.flag.is_required(),
_ => false,
})
.collect()
}
fn message_fields(&'a self) -> Vec<&'a FieldGen> {
self.fields
.iter()
.filter(|f| f.proto_type == FieldDescriptorProto_Type::TYPE_MESSAGE)
.collect()
}
fn fields_except_oneof(&'a self) -> Vec<&'a FieldGen> {
self.fields
.iter()
.filter(|f| !f.is_oneof())
.collect()
}
fn fields_except_group(&'a self) -> Vec<&'a FieldGen> {
self.fields
.iter()
.filter(|f| f.proto_type != FieldDescriptorProto_Type::TYPE_GROUP)
.collect()
}
fn fields_except_oneof_and_group(&'a self) -> Vec<&'a FieldGen> {
self.fields
.iter()
.filter(|f| {
!f.is_oneof() && f.proto_type != FieldDescriptorProto_Type::TYPE_GROUP
})
.collect()
}
    /// Emits, for each oneof on this message, an `if let Some(...)` over the
    /// oneof storage and a `match` over its variants; `cb` is invoked to emit
    /// the body for each matched variant, receiving the bound value name and
    /// its (possibly referenced) Rust type.
    fn write_match_each_oneof_variant<F>(&self, w: &mut CodeWriter, cb: F)
    where
        F : Fn(&mut CodeWriter, &OneofVariantGen, &str, &RustType),
    {
        for oneof in self.oneofs() {
            w.if_let_stmt("::std::option::Option::Some(ref v)", &format!("self.{}", oneof.name())[..], |w| {
                w.match_block("v", |w| {
                    for variant in oneof.variants() {
                        let ref field = variant.field;
                        // Non-Copy payloads are matched by reference.
                        let (refv, vtype) =
                            if !field.elem_type_is_copy() {
                                ("ref v", field.elem().rust_type().ref_type())
                            } else {
                                ("v", field.elem().rust_type())
                            };
                        w.case_block(format!("&{}({})", variant.path(), refv), |w| {
                            cb(w, &variant, "v", &vtype);
                        });
                    }
                });
            });
        }
    }
    /// Generates the `Message::write_to_with_cached_sizes` implementation:
    /// serializes regular fields first, then the active oneof variant, then
    /// the unknown fields.
    fn write_write_to_with_cached_sizes(&self, w: &mut CodeWriter) {
        w.def_fn("write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()>", |w| {
            // Emit a write for every regular (non-oneof, non-group) field.
            for f in self.fields_except_oneof_and_group() {
                f.write_message_write_field(w);
            }
            self.write_match_each_oneof_variant(w, |w, variant, v, v_type| {
                variant.field.write_write_element(w, "os", v, v_type);
            });
            w.write_line("os.write_unknown_fields(self.get_unknown_fields())?;");
            w.write_line("::std::result::Result::Ok(())");
        });
    }
    /// Generates `get_cached_size`, which reads the size memoized by
    /// `compute_size`.
    fn write_get_cached_size(&self, w: &mut CodeWriter) {
        w.def_fn("get_cached_size(&self) -> u32", |w| {
            w.write_line("self.cached_size.get()");
        });
    }
    /// Generates `default_instance()`, a lazily-initialized static default
    /// value of the message type.
    fn write_default_instance(&self, w: &mut CodeWriter) {
        w.pub_fn(&format!("default_instance() -> &'static {}", self.type_name), |w| {
            w.lazy_static_decl_get_simple(
                "instance",
                &self.type_name,
                &format!("{}::new", self.type_name));
        });
    }
    /// Generates `compute_size`: sums the encoded size of every field (plus
    /// unknown fields) and memoizes the result in `cached_size`.
    fn write_compute_size(&self, w: &mut CodeWriter) {
        w.comment("Compute sizes of nested messages");
        // there are unused variables in oneof
        w.allow(&["unused_variables"]);
        w.def_fn("compute_size(&self) -> u32", |w| {
            w.write_line("let mut my_size = 0;");
            // Regular fields first, then the active oneof variant.
            for field in self.fields_except_oneof_and_group() {
                field.write_message_compute_field_size("my_size", w);
            }
            self.write_match_each_oneof_variant(w, |w, variant, v, vtype| {
                variant.field.write_element_size(w, v, vtype, "my_size");
            });
            w.write_line(
                "my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());",
            );
            // Memoize so write_to_with_cached_sizes can reuse the result.
            w.write_line("self.cached_size.set(my_size);");
            w.write_line("my_size");
        });
    }
fn write_field_accessors(&self, w: &mut CodeWriter) {
for f in self.fields_except_group() {
w.write_line("");
let reconstruct_def = f.reconstruct_def();
w.comment(&(reconstruct_def + ";"));
w.write_line("");
f.write_message_single_field_accessors(w);
}
}
    /// Generates the inherent `impl` block: `new()`, `default_instance()`
    /// and the per-field accessors.
    fn write_impl_self(&self, w: &mut CodeWriter) {
        w.impl_self_block(&self.type_name, |w| {
            w.pub_fn(&format!("new() -> {}", self.type_name), |w| {
                w.write_line("::std::default::Default::default()");
            });
            w.write_line("");
            self.write_default_instance(w);
            self.write_field_accessors(w);
        });
    }
fn write_unknown_fields(&self, w: &mut CodeWriter) {
w.def_fn(
"get_unknown_fields(&self) -> &::protobuf::UnknownFields",
|w| { w.write_line("&self.unknown_fields"); },
);
w.write_line("");
w.def_fn("mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields", |w| {
w.write_line("&mut self.unknown_fields");
});
}
fn write_merge_from(&self, w: &mut CodeWriter) {
w.def_fn(&format!("merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()>"), |w| {
w.while_block("!is.eof()?", |w| {
w.write_line(&format!("let (field_number, wire_type) = is.read_tag_unpack()?;"));
w.match_block("field_number", |w| {
for f in &self.fields_except_group() {
let number = f.proto_field.number();
w.case_block(number.to_string(), |w| {
f.write_merge_from_field(w);
});
}
w.case_block("_", |w| {
w.write_line("::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;");
});
});
});
w.write_line("::std::result::Result::Ok(())");
});
}
    /// Emits one `fields.push(make_accessor(...))` call registering the
    /// reflection accessor for `field` into the vector named `fields_var`.
    fn write_descriptor_field(&self, fields_var: &str, field: &FieldGen, w: &mut CodeWriter) {
        let accessor_fn = field.accessor_fn();
        w.write_line(&format!(
            "{}.push(::protobuf::reflect::accessor::{}(",
            fields_var,
            accessor_fn.sig()
        ));
        w.indented(|w| {
            w.write_line(&format!("\"{}\",", field.proto_field.name()));
            for acc in &accessor_fn.accessors {
                w.write_line(&format!("{}::{},", self.type_name, acc));
            }
        });
        w.write_line("));");
    }
fn write_descriptor_static(&self, w: &mut CodeWriter) {
w.def_fn(&format!("descriptor_static(_: ::std::option::Option<{}>) -> &'static ::protobuf::reflect::MessageDescriptor", self.type_name), |w| {
w.lazy_static_decl_get("descriptor", "::protobuf::reflect::MessageDescriptor", |w| {
let fields = self.fields_except_group();
if fields.is_empty() {
w.write_line(&format!("let fields = ::std::vec::Vec::new();"));
} else {
w.write_line(&format!("let mut fields = ::std::vec::Vec::new();"));
}
for field in fields {
self.write_descriptor_field("fields", field, w);;
}
w.write_line(&format!(
"::protobuf::reflect::MessageDescriptor::new::<{}>(", self.type_name));
w.indented(|w| {
w.write_line(&format!("\"{}\",", self.type_name));
w.write_line("fields,");
w.write_line("file_descriptor_proto()");
});
w.write_line(")");
});
});
}
fn write_is_initialized(&self, w: &mut CodeWriter) {
w.def_fn(&format!("is_initialized(&self) -> bool"), |w| {
// TODO: use single loop
for f in self.required_fields() {
f.write_if_self_field_is_none(w, |w| { w.write_line("return false;"); });
}
for f in self.message_fields() {
if let FieldKind::Map(..) = f.kind {
// TODO: check values
continue;
}
// TODO:
// if message is declared in this file and has no message fields,
// we could skip the check here
f.write_for_self_field(w, "v", |w, _t| {
w.if_stmt(
"!v.is_initialized()",
|w| { w.write_line("return false;"); },
);
});
}
w.write_line("true");
});
}
    /// Generates the `impl ::protobuf::Message` block, wiring together all
    /// the previously generated serialization/deserialization methods plus
    /// the `Any` conversions and descriptor access.
    fn write_impl_message(&self, w: &mut CodeWriter) {
        w.impl_for_block("::protobuf::Message", &self.type_name, |w| {
            self.write_is_initialized(w);
            w.write_line("");
            self.write_merge_from(w);
            w.write_line("");
            self.write_compute_size(w);
            w.write_line("");
            self.write_write_to_with_cached_sizes(w);
            w.write_line("");
            self.write_get_cached_size(w);
            w.write_line("");
            self.write_unknown_fields(w);
            w.write_line("");
            w.def_fn("as_any(&self) -> &::std::any::Any", |w| {
                w.write_line("self as &::std::any::Any");
            });
            w.def_fn("as_any_mut(&mut self) -> &mut ::std::any::Any", |w| {
                w.write_line("self as &mut ::std::any::Any");
            });
            w.def_fn("into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any>", |w| {
                w.write_line("self");
            });
            w.write_line("");
            w.def_fn("descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor", |w| {
                w.write_line("::protobuf::MessageStatic::descriptor_static(None::<Self>)");
            });
        });
    }
    /// Generates the `impl ::protobuf::MessageStatic` block; the static
    /// descriptor is omitted for lite-runtime files.
    fn write_impl_message_static(&self, w: &mut CodeWriter) {
        w.impl_for_block("::protobuf::MessageStatic", &self.type_name, |w| {
            w.def_fn(&format!("new() -> {}", self.type_name), |w| {
                w.write_line(&format!("{}::new()", self.type_name));
            });
            if !self.lite_runtime {
                w.write_line("");
                self.write_descriptor_static(w);
            }
        });
    }
fn write_impl_value(&self, w: &mut CodeWriter) {
w.impl_for_block("::protobuf::reflect::ProtobufValue", &self.type_name, |w| {
w.def_fn(
"as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef",
|w| w.write_line("::protobuf::reflect::ProtobufValueRef::Message(self)"),
)
})
}
    /// Generates a `Debug` impl that renders the message in protobuf text
    /// format (only emitted for non-lite messages; lite derives `Debug`).
    fn write_impl_show(&self, w: &mut CodeWriter) {
        w.impl_for_block("::std::fmt::Debug", &self.type_name, |w| {
            w.def_fn("fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result", |w| {
                w.write_line("::protobuf::text_format::fmt(self, f)");
            });
        });
    }
    /// Generates the `impl ::protobuf::Clear` block: clears every field via
    /// its generated clear method, then the unknown fields.
    fn write_impl_clear(&self, w: &mut CodeWriter) {
        w.impl_for_block("::protobuf::Clear", &self.type_name, |w| {
            w.def_fn("clear(&mut self)", |w| {
                // TODO: no need to clear oneof fields in loop
                for f in self.fields_except_group() {
                    let clear_field_func = f.clear_field_func();
                    w.write_line(&format!("self.{}();", clear_field_func));
                }
                w.write_line("self.unknown_fields.clear();");
            });
        });
    }
fn write_struct(&self, w: &mut CodeWriter) {
let mut derive = vec!["PartialEq", "Clone", "Default"];
if self.lite_runtime {
derive.push("Debug");
}
w.derive(&derive);
w.pub_struct(&self.type_name, |w| {
if !self.fields_except_oneof().is_empty() {
w.comment("message fields");
for field in self.fields_except_oneof() {
if field.proto_type == FieldDescriptorProto_Type::TYPE_GROUP {
w.comment(&format!("{}: <group>", &field.rust_name));
} else {
let vis = if field.expose_field {
Visibility::Public
} else {
match field.kind {
FieldKind::Repeated(..) => Visibility::Default,
FieldKind::Singular(SingularField { ref flag, .. }) => {
match *flag {
SingularFieldFlag::WithFlag { .. } => Visibility::Default,
SingularFieldFlag::WithoutFlag => Visibility::Public,
}
}
FieldKind::Map(..) => Visibility::Public,
FieldKind::Oneof(..) => unreachable!(),
}
};
w.field_decl_vis(
vis,
&field.rust_name,
&field.full_storage_type().to_string(),
);
}
}
}
if !self.oneofs().is_empty() {
w.comment("message oneof groups");
for oneof in self.oneofs() {
let vis = match self.expose_oneof() {
true => Visibility::Public,
false => Visibility::Default,
};
w.field_decl_vis(vis, oneof.name(), &oneof.full_storage_type().to_string());
}
}
w.comment("special fields");
// TODO: make public
w.field_decl("unknown_fields", "::protobuf::UnknownFields");
w.field_decl("cached_size", "::protobuf::CachedSize");
});
}
pub fn write(&self, w: &mut CodeWriter) {
self.write_struct(w);
// Cell<u32> (which stores cached size) is not Sync
// so messages do not implicitly implement sync.
// `cached_size` could be of type `AtomicUsize`, which could be updated
// with `Ordering::Relaxed`, however:
// * usize is twice as large as u32 on 64-bit, and rust has no `AtomicU32`
// * there's small performance degradation when using `AtomicUsize`, which is
// probably related to https://github.com/rust-lang/rust/pull/30962
// Anyway, `cached_size` is always read after updated from the same thread
// so even in theory the code is incorrect, `u32` write is atomic on all platforms.
w.write_line("");
w.comment(
"see codegen.rs for the explanation why impl Sync explicitly",
);
w.unsafe_impl("::std::marker::Sync", &self.type_name);
for oneof in self.oneofs() {
w.write_line("");
oneof.write_enum(w);
}
w.write_line("");
self.write_impl_self(w);
w.write_line("");
self.write_impl_message(w);
w.write_line("");
self.write_impl_message_static(w);
w.write_line("");
self.write_impl_clear(w);
if !self.lite_runtime {
w.write_line("");
self.write_impl_show(w);
}
w.write_line("");
self.write_impl_value(w);
let mut nested_prefix = self.type_name.to_string();
nested_prefix.push_str("_");
for nested in &self.message.to_scope().get_messages() {
// ignore map entries, because they are not used in map fields
if nested.map_entry().is_none() {
w.write_line("");
MessageGen::new(nested, self.root_scope).write(w);
}
}
for enum_type in &self.message.to_scope().get_enums() {
w.write_line("");
EnumGen::new(enum_type, self.message.get_scope().get_file_descriptor()).write(w);
}
}
}
/// Represents a hardware accelerator type.
// NOTE(review): appears to be prost-generated protobuf bindings (see the
// `::prost::Enumeration` derive) — keep discriminants in sync with the
// source .proto rather than hand-editing.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum AcceleratorType {
    /// Unspecified accelerator type, which means no accelerator.
    Unspecified = 0,
    /// Nvidia Tesla K80 GPU.
    NvidiaTeslaK80 = 1,
    /// Nvidia Tesla P100 GPU.
    NvidiaTeslaP100 = 2,
    /// Nvidia Tesla V100 GPU.
    NvidiaTeslaV100 = 3,
    /// Nvidia Tesla P4 GPU.
    NvidiaTeslaP4 = 4,
    /// Nvidia Tesla T4 GPU.
    NvidiaTeslaT4 = 5,
    /// Nvidia Tesla A100 GPU.
    // Discriminants jump from 5 to 8: values 6 and 7 are not present here.
    NvidiaTeslaA100 = 8,
}
/// References an API call. It contains more information about long running
/// operation and Jobs that are triggered by the API call.
// NOTE(review): prost-generated message binding; field tags mirror the proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UserActionReference {
    /// The method name of the API RPC call. For example,
    /// "/google.cloud.aiplatform.{apiVersion}.DatasetService.CreateDataset"
    // Tags 1 and 2 belong to the `reference` oneof below, hence tag 3 here.
    #[prost(string, tag = "3")]
    pub method: ::prost::alloc::string::String,
    #[prost(oneof = "user_action_reference::Reference", tags = "1, 2")]
    pub reference: ::core::option::Option<user_action_reference::Reference>,
}
/// Nested message and enum types in `UserActionReference`.
pub mod user_action_reference {
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Reference {
        /// For API calls that return a long running operation.
        /// Resource name of the long running operation.
        /// Format:
        /// 'projects/{project}/locations/{location}/operations/{operation}'
        #[prost(string, tag = "1")]
        Operation(::prost::alloc::string::String),
        /// For API calls that start a LabelingJob.
        /// Resource name of the LabelingJob.
        /// Format:
        /// 'projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}'
        #[prost(string, tag = "2")]
        DataLabelingJob(::prost::alloc::string::String),
    }
}
/// Used to assign specific AnnotationSpec to a particular area of a DataItem or
/// the whole part of the DataItem.
// NOTE(review): prost-generated message binding. Proto tags are not in
// declaration order (e.g. `update_time` is tag 7, `annotation_source` tag 5);
// this matches the wire format and must not be "fixed".
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Annotation {
    /// Output only. Resource name of the Annotation.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. Google Cloud Storage URI points to a YAML file describing \[payload][google.cloud.aiplatform.v1.Annotation.payload\]. The
    /// schema is defined as an [OpenAPI 3.0.2 Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    /// The schema files that can be used here are found in
    /// gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the
    /// chosen schema must be consistent with the parent Dataset's
    /// \[metadata][google.cloud.aiplatform.v1.Dataset.metadata_schema_uri\].
    #[prost(string, tag = "2")]
    pub payload_schema_uri: ::prost::alloc::string::String,
    /// Required. The schema of the payload can be found in
    /// \[payload_schema][google.cloud.aiplatform.v1.Annotation.payload_schema_uri\].
    #[prost(message, optional, tag = "3")]
    pub payload: ::core::option::Option<::prost_types::Value>,
    /// Output only. Timestamp when this Annotation was created.
    #[prost(message, optional, tag = "4")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Annotation was last updated.
    #[prost(message, optional, tag = "7")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Optional. Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "8")]
    pub etag: ::prost::alloc::string::String,
    /// Output only. The source of the Annotation.
    #[prost(message, optional, tag = "5")]
    pub annotation_source: ::core::option::Option<UserActionReference>,
    /// Optional. The labels with user-defined metadata to organize your Annotations.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one Annotation(System
    /// labels are excluded).
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable. Following system labels exist for each Annotation:
    ///
    /// * "aiplatform.googleapis.com/annotation_set_name":
    ///   optional, name of the UI's annotation set this Annotation belongs to.
    ///   If not set, the Annotation is not visible in the UI.
    ///
    /// * "aiplatform.googleapis.com/payload_schema":
    ///   output only, its value is the \[payload_schema's][google.cloud.aiplatform.v1.Annotation.payload_schema_uri\]
    ///   title.
    #[prost(map = "string, string", tag = "6")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
}
/// Identifies a concept with which DataItems may be annotated with.
// NOTE(review): prost-generated message binding; do not alter field tags.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AnnotationSpec {
    /// Output only. Resource name of the AnnotationSpec.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The user-defined name of the AnnotationSpec.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Output only. Timestamp when this AnnotationSpec was created.
    #[prost(message, optional, tag = "3")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when AnnotationSpec was last updated.
    #[prost(message, optional, tag = "4")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Optional. Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "5")]
    pub etag: ::prost::alloc::string::String,
}
/// Value is the value of the field.
// NOTE(review): prost-generated wrapper around a three-way oneof
// (int64 / double / string); the outer struct has no fields of its own.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Value {
    #[prost(oneof = "value::Value", tags = "1, 2, 3")]
    pub value: ::core::option::Option<value::Value>,
}
/// Nested message and enum types in `Value`.
pub mod value {
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Value {
        /// An integer value.
        #[prost(int64, tag = "1")]
        IntValue(i64),
        /// A double value.
        #[prost(double, tag = "2")]
        DoubleValue(f64),
        /// A string value.
        #[prost(string, tag = "3")]
        StringValue(::prost::alloc::string::String),
    }
}
/// Instance of a general artifact.
// NOTE(review): prost-generated message binding; tags are sparse/non-sequential
// by design and must match the .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Artifact {
    /// Output only. The resource name of the Artifact.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// User provided display name of the Artifact.
    /// May be up to 128 Unicode characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// The uniform resource identifier of the artifact file.
    /// May be empty if there is no actual artifact file.
    #[prost(string, tag = "6")]
    pub uri: ::prost::alloc::string::String,
    /// An eTag used to perform consistent read-modify-write updates. If not set, a
    /// blind "overwrite" update happens.
    #[prost(string, tag = "9")]
    pub etag: ::prost::alloc::string::String,
    /// The labels with user-defined metadata to organize your Artifacts.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one Artifact (System
    /// labels are excluded).
    #[prost(map = "string, string", tag = "10")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Output only. Timestamp when this Artifact was created.
    #[prost(message, optional, tag = "11")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Artifact was last updated.
    #[prost(message, optional, tag = "12")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The state of this Artifact. This is a property of the Artifact, and does
    /// not imply or capture any ongoing process. This property is managed by
    /// clients (such as Vertex Pipelines), and the system does not prescribe
    /// or check the validity of state transitions.
    // Stored as a raw i32; interpret via `artifact::State` (prost convention).
    #[prost(enumeration = "artifact::State", tag = "13")]
    pub state: i32,
    /// The title of the schema describing the metadata.
    ///
    /// Schema title and version is expected to be registered in earlier Create
    /// Schema calls. And both are used together as unique identifiers to identify
    /// schemas within the local metadata store.
    #[prost(string, tag = "14")]
    pub schema_title: ::prost::alloc::string::String,
    /// The version of the schema in schema_name to use.
    ///
    /// Schema title and version is expected to be registered in earlier Create
    /// Schema calls. And both are used together as unique identifiers to identify
    /// schemas within the local metadata store.
    #[prost(string, tag = "15")]
    pub schema_version: ::prost::alloc::string::String,
    /// Properties of the Artifact.
    /// The size of this field should not exceed 200KB.
    #[prost(message, optional, tag = "16")]
    pub metadata: ::core::option::Option<::prost_types::Struct>,
    /// Description of the Artifact
    #[prost(string, tag = "17")]
    pub description: ::prost::alloc::string::String,
}
/// Nested message and enum types in `Artifact`.
pub mod artifact {
    /// Describes the state of the Artifact.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum State {
        /// Unspecified state for the Artifact.
        Unspecified = 0,
        /// A state used by systems like Vertex Pipelines to indicate that the
        /// underlying data item represented by this Artifact is being created.
        Pending = 1,
        /// A state indicating that the Artifact should exist, unless something
        /// external to the system deletes it.
        Live = 2,
    }
}
/// Success and error statistics of processing multiple entities
/// (for example, DataItems or structured data rows) in batch.
// NOTE(review): prost-generated message binding.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CompletionStats {
    /// Output only. The number of entities that had been processed successfully.
    #[prost(int64, tag = "1")]
    pub successful_count: i64,
    /// Output only. The number of entities for which any error was encountered.
    #[prost(int64, tag = "2")]
    pub failed_count: i64,
    /// Output only. In cases when enough errors are encountered a job, pipeline, or operation
    /// may be failed as a whole. Below is the number of entities for which the
    /// processing had not been finished (either in successful or failed state).
    /// Set to -1 if the number is unknown (for example, the operation failed
    /// before the total entity number could be collected).
    #[prost(int64, tag = "3")]
    pub incomplete_count: i64,
}
/// Represents a customer-managed encryption key spec that can be applied to
/// a top-level resource.
// NOTE(review): prost-generated message binding.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EncryptionSpec {
    /// Required. The Cloud KMS resource identifier of the customer managed encryption key
    /// used to protect a resource. Has the form:
    /// `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`.
    /// The key needs to be in the same region as where the compute resource is
    /// created.
    #[prost(string, tag = "1")]
    pub kms_key_name: ::prost::alloc::string::String,
}
/// Metadata describing the Model's input and output for explanation.
// NOTE(review): prost-generated message binding; nested types live in the
// `explanation_metadata` module below.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExplanationMetadata {
    /// Required. Map from feature names to feature input metadata. Keys are the name of the
    /// features. Values are the specification of the feature.
    ///
    /// An empty InputMetadata is valid. It describes a text feature which has the
    /// name specified as the key in \[ExplanationMetadata.inputs][google.cloud.aiplatform.v1.ExplanationMetadata.inputs\]. The baseline
    /// of the empty feature is chosen by Vertex AI.
    ///
    /// For Vertex AI-provided Tensorflow images, the key can be any friendly
    /// name of the feature. Once specified,
    /// \[featureAttributions][google.cloud.aiplatform.v1.Attribution.feature_attributions\] are keyed by
    /// this key (if not grouped with another feature).
    ///
    /// For custom images, the key must match with the key in
    /// \[instance][google.cloud.aiplatform.v1.ExplainRequest.instances\].
    #[prost(map = "string, message", tag = "1")]
    pub inputs: ::std::collections::HashMap<
        ::prost::alloc::string::String,
        explanation_metadata::InputMetadata,
    >,
    /// Required. Map from output names to output metadata.
    ///
    /// For Vertex AI-provided Tensorflow images, keys can be any user defined
    /// string that consists of any UTF-8 characters.
    ///
    /// For custom images, keys are the name of the output field in the prediction
    /// to be explained.
    ///
    /// Currently only one key is allowed.
    #[prost(map = "string, message", tag = "2")]
    pub outputs: ::std::collections::HashMap<
        ::prost::alloc::string::String,
        explanation_metadata::OutputMetadata,
    >,
    /// Points to a YAML file stored on Google Cloud Storage describing the format
    /// of the [feature attributions]\[google.cloud.aiplatform.v1.Attribution.feature_attributions\].
    /// The schema is defined as an OpenAPI 3.0.2 [Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    /// AutoML tabular Models always have this field populated by Vertex AI.
    /// Note: The URI given on output may be different, including the URI scheme,
    /// than the one given on input. The output URI will point to a location where
    /// the user only has a read access.
    #[prost(string, tag = "3")]
    pub feature_attributions_schema_uri: ::prost::alloc::string::String,
}
/// Nested message and enum types in `ExplanationMetadata`.
// NOTE(review): prost-generated module. The only edits below are comment-level:
// pseudo-code examples in rustdoc are fenced as ```text so rustdoc does not
// try to compile them as Rust doctests.
pub mod explanation_metadata {
    /// Metadata of the input of a feature.
    ///
    /// Fields other than \[InputMetadata.input_baselines][google.cloud.aiplatform.v1.ExplanationMetadata.InputMetadata.input_baselines\] are applicable only
    /// for Models that are using Vertex AI-provided images for Tensorflow.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct InputMetadata {
        /// Baseline inputs for this feature.
        ///
        /// If no baseline is specified, Vertex AI chooses the baseline for this
        /// feature. If multiple baselines are specified, Vertex AI returns the
        /// average attributions across them in \[Attribution.feature_attributions][google.cloud.aiplatform.v1.Attribution.feature_attributions\].
        ///
        /// For Vertex AI-provided Tensorflow images (both 1.x and 2.x), the shape
        /// of each baseline must match the shape of the input tensor. If a scalar is
        /// provided, we broadcast to the same shape as the input tensor.
        ///
        /// For custom images, the element of the baselines must be in the same
        /// format as the feature's input in the
        /// \[instance][google.cloud.aiplatform.v1.ExplainRequest.instances][\]. The schema of any single instance
        /// may be specified via Endpoint's DeployedModels'
        /// \[Model's][google.cloud.aiplatform.v1.DeployedModel.model\]
        /// \[PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata\]
        /// \[instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\].
        #[prost(message, repeated, tag = "1")]
        pub input_baselines: ::prost::alloc::vec::Vec<::prost_types::Value>,
        /// Name of the input tensor for this feature. Required and is only
        /// applicable to Vertex AI-provided images for Tensorflow.
        #[prost(string, tag = "2")]
        pub input_tensor_name: ::prost::alloc::string::String,
        /// Defines how the feature is encoded into the input tensor. Defaults to
        /// IDENTITY.
        // Raw i32; interpret via `input_metadata::Encoding`.
        #[prost(enumeration = "input_metadata::Encoding", tag = "3")]
        pub encoding: i32,
        /// Modality of the feature. Valid values are: numeric, image. Defaults to
        /// numeric.
        #[prost(string, tag = "4")]
        pub modality: ::prost::alloc::string::String,
        /// The domain details of the input feature value. Like min/max, original
        /// mean or standard deviation if normalized.
        #[prost(message, optional, tag = "5")]
        pub feature_value_domain: ::core::option::Option<input_metadata::FeatureValueDomain>,
        /// Specifies the index of the values of the input tensor.
        /// Required when the input tensor is a sparse representation. Refer to
        /// Tensorflow documentation for more details:
        /// <https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor.>
        #[prost(string, tag = "6")]
        pub indices_tensor_name: ::prost::alloc::string::String,
        /// Specifies the shape of the values of the input if the input is a sparse
        /// representation. Refer to Tensorflow documentation for more details:
        /// <https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor.>
        #[prost(string, tag = "7")]
        pub dense_shape_tensor_name: ::prost::alloc::string::String,
        /// A list of feature names for each index in the input tensor.
        /// Required when the input \[InputMetadata.encoding][google.cloud.aiplatform.v1.ExplanationMetadata.InputMetadata.encoding\] is BAG_OF_FEATURES,
        /// BAG_OF_FEATURES_SPARSE, INDICATOR.
        #[prost(string, repeated, tag = "8")]
        pub index_feature_mapping: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
        /// Encoded tensor is a transformation of the input tensor. Must be provided
        /// if choosing
        /// [Integrated Gradients attribution]\[google.cloud.aiplatform.v1.ExplanationParameters.integrated_gradients_attribution\]
        /// or [XRAI attribution]\[google.cloud.aiplatform.v1.ExplanationParameters.xrai_attribution\] and the
        /// input tensor is not differentiable.
        ///
        /// An encoded tensor is generated if the input tensor is encoded by a lookup
        /// table.
        #[prost(string, tag = "9")]
        pub encoded_tensor_name: ::prost::alloc::string::String,
        /// A list of baselines for the encoded tensor.
        ///
        /// The shape of each baseline should match the shape of the encoded tensor.
        /// If a scalar is provided, Vertex AI broadcasts to the same shape as the
        /// encoded tensor.
        #[prost(message, repeated, tag = "10")]
        pub encoded_baselines: ::prost::alloc::vec::Vec<::prost_types::Value>,
        /// Visualization configurations for image explanation.
        #[prost(message, optional, tag = "11")]
        pub visualization: ::core::option::Option<input_metadata::Visualization>,
        /// Name of the group that the input belongs to. Features with the same group
        /// name will be treated as one feature when computing attributions. Features
        /// grouped together can have different shapes in value. If provided, there
        /// will be one single attribution generated in
        /// \[Attribution.feature_attributions][google.cloud.aiplatform.v1.Attribution.feature_attributions\], keyed by the group name.
        #[prost(string, tag = "12")]
        pub group_name: ::prost::alloc::string::String,
    }
    /// Nested message and enum types in `InputMetadata`.
    pub mod input_metadata {
        /// Domain details of the input feature value. Provides numeric information
        /// about the feature, such as its range (min, max). If the feature has been
        /// pre-processed, for example with z-scoring, then it provides information
        /// about how to recover the original feature. For example, if the input
        /// feature is an image and it has been pre-processed to obtain 0-mean and
        /// stddev = 1 values, then original_mean, and original_stddev refer to the
        /// mean and stddev of the original feature (e.g. image tensor) from which
        /// input feature (with mean = 0 and stddev = 1) was obtained.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct FeatureValueDomain {
            /// The minimum permissible value for this feature.
            #[prost(float, tag = "1")]
            pub min_value: f32,
            /// The maximum permissible value for this feature.
            #[prost(float, tag = "2")]
            pub max_value: f32,
            /// If this input feature has been normalized to a mean value of 0,
            /// the original_mean specifies the mean value of the domain prior to
            /// normalization.
            #[prost(float, tag = "3")]
            pub original_mean: f32,
            /// If this input feature has been normalized to a standard deviation of
            /// 1.0, the original_stddev specifies the standard deviation of the domain
            /// prior to normalization.
            #[prost(float, tag = "4")]
            pub original_stddev: f32,
        }
        /// Visualization configurations for image explanation.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct Visualization {
            /// Type of the image visualization. Only applicable to
            /// [Integrated Gradients attribution]\[google.cloud.aiplatform.v1.ExplanationParameters.integrated_gradients_attribution\].
            /// OUTLINES shows regions of attribution, while PIXELS shows per-pixel
            /// attribution. Defaults to OUTLINES.
            // `type` is a Rust keyword, hence the raw identifier `r#type`.
            #[prost(enumeration = "visualization::Type", tag = "1")]
            pub r#type: i32,
            /// Whether to only highlight pixels with positive contributions, negative
            /// or both. Defaults to POSITIVE.
            #[prost(enumeration = "visualization::Polarity", tag = "2")]
            pub polarity: i32,
            /// The color scheme used for the highlighted areas.
            ///
            /// Defaults to PINK_GREEN for
            /// [Integrated Gradients attribution]\[google.cloud.aiplatform.v1.ExplanationParameters.integrated_gradients_attribution\],
            /// which shows positive attributions in green and negative in pink.
            ///
            /// Defaults to VIRIDIS for
            /// [XRAI attribution]\[google.cloud.aiplatform.v1.ExplanationParameters.xrai_attribution\], which
            /// highlights the most influential regions in yellow and the least
            /// influential in blue.
            #[prost(enumeration = "visualization::ColorMap", tag = "3")]
            pub color_map: i32,
            /// Excludes attributions above the specified percentile from the
            /// highlighted areas. Using the clip_percent_upperbound and
            /// clip_percent_lowerbound together can be useful for filtering out noise
            /// and making it easier to see areas of strong attribution. Defaults to
            /// 99.9.
            #[prost(float, tag = "4")]
            pub clip_percent_upperbound: f32,
            /// Excludes attributions below the specified percentile, from the
            /// highlighted areas. Defaults to 62.
            #[prost(float, tag = "5")]
            pub clip_percent_lowerbound: f32,
            /// How the original image is displayed in the visualization.
            /// Adjusting the overlay can help increase visual clarity if the original
            /// image makes it difficult to view the visualization. Defaults to NONE.
            #[prost(enumeration = "visualization::OverlayType", tag = "6")]
            pub overlay_type: i32,
        }
        /// Nested message and enum types in `Visualization`.
        pub mod visualization {
            /// Type of the image visualization. Only applicable to
            /// [Integrated Gradients attribution]\[google.cloud.aiplatform.v1.ExplanationParameters.integrated_gradients_attribution\].
            #[derive(
                Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
            )]
            #[repr(i32)]
            pub enum Type {
                /// Should not be used.
                Unspecified = 0,
                /// Shows which pixel contributed to the image prediction.
                Pixels = 1,
                /// Shows which region contributed to the image prediction by outlining
                /// the region.
                Outlines = 2,
            }
            /// Whether to only highlight pixels with positive contributions, negative
            /// or both. Defaults to POSITIVE.
            #[derive(
                Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
            )]
            #[repr(i32)]
            pub enum Polarity {
                /// Default value. This is the same as POSITIVE.
                Unspecified = 0,
                /// Highlights the pixels/outlines that were most influential to the
                /// model's prediction.
                Positive = 1,
                /// Setting polarity to negative highlights areas that does not lead to
                /// the models's current prediction.
                Negative = 2,
                /// Shows both positive and negative attributions.
                Both = 3,
            }
            /// The color scheme used for highlighting areas.
            #[derive(
                Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
            )]
            #[repr(i32)]
            pub enum ColorMap {
                /// Should not be used.
                Unspecified = 0,
                /// Positive: green. Negative: pink.
                PinkGreen = 1,
                /// Viridis color map: A perceptually uniform color mapping which is
                /// easier to see by those with colorblindness and progresses from yellow
                /// to green to blue. Positive: yellow. Negative: blue.
                Viridis = 2,
                /// Positive: red. Negative: red.
                Red = 3,
                /// Positive: green. Negative: green.
                Green = 4,
                // Variants below are declared out of numeric order (6 before 5);
                // this mirrors the generated source and is harmless.
                /// Positive: green. Negative: red.
                RedGreen = 6,
                /// PiYG palette.
                PinkWhiteGreen = 5,
            }
            /// How the original image is displayed in the visualization.
            #[derive(
                Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
            )]
            #[repr(i32)]
            pub enum OverlayType {
                /// Default value. This is the same as NONE.
                Unspecified = 0,
                /// No overlay.
                None = 1,
                /// The attributions are shown on top of the original image.
                Original = 2,
                /// The attributions are shown on top of grayscaled version of the
                /// original image.
                Grayscale = 3,
                /// The attributions are used as a mask to reveal predictive parts of
                /// the image and hide the un-predictive parts.
                MaskBlack = 4,
            }
        }
        /// Defines how a feature is encoded. Defaults to IDENTITY.
        #[derive(
            Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
        )]
        #[repr(i32)]
        pub enum Encoding {
            /// Default value. This is the same as IDENTITY.
            Unspecified = 0,
            /// The tensor represents one feature.
            Identity = 1,
            /// The tensor represents a bag of features where each index maps to
            /// a feature. \[InputMetadata.index_feature_mapping][google.cloud.aiplatform.v1.ExplanationMetadata.InputMetadata.index_feature_mapping\] must be provided for
            /// this encoding. For example:
            /// ```text
            /// input = [27, 6.0, 150]
            /// index_feature_mapping = ["age", "height", "weight"]
            /// ```
            BagOfFeatures = 2,
            /// The tensor represents a bag of features where each index maps to a
            /// feature. Zero values in the tensor indicates feature being
            /// non-existent. \[InputMetadata.index_feature_mapping][google.cloud.aiplatform.v1.ExplanationMetadata.InputMetadata.index_feature_mapping\] must be provided
            /// for this encoding. For example:
            /// ```text
            /// input = [2, 0, 5, 0, 1]
            /// index_feature_mapping = ["a", "b", "c", "d", "e"]
            /// ```
            BagOfFeaturesSparse = 3,
            /// The tensor is a list of binaries representing whether a feature exists
            /// or not (1 indicates existence). \[InputMetadata.index_feature_mapping][google.cloud.aiplatform.v1.ExplanationMetadata.InputMetadata.index_feature_mapping\]
            /// must be provided for this encoding. For example:
            /// ```text
            /// input = [1, 0, 1, 0, 1]
            /// index_feature_mapping = ["a", "b", "c", "d", "e"]
            /// ```
            Indicator = 4,
            /// The tensor is encoded into a 1-dimensional array represented by an
            /// encoded tensor. \[InputMetadata.encoded_tensor_name][google.cloud.aiplatform.v1.ExplanationMetadata.InputMetadata.encoded_tensor_name\] must be provided
            /// for this encoding. For example:
            /// ```text
            /// input = ["This", "is", "a", "test", "."]
            /// encoded = [0.1, 0.2, 0.3, 0.4, 0.5]
            /// ```
            CombinedEmbedding = 5,
            /// Select this encoding when the input tensor is encoded into a
            /// 2-dimensional array represented by an encoded tensor.
            /// \[InputMetadata.encoded_tensor_name][google.cloud.aiplatform.v1.ExplanationMetadata.InputMetadata.encoded_tensor_name\] must be provided for this
            /// encoding. The first dimension of the encoded tensor's shape is the same
            /// as the input tensor's shape. For example:
            /// ```text
            /// input = ["This", "is", "a", "test", "."]
            /// encoded = [[0.1, 0.2, 0.3, 0.4, 0.5],
            /// [0.2, 0.1, 0.4, 0.3, 0.5],
            /// [0.5, 0.1, 0.3, 0.5, 0.4],
            /// [0.5, 0.3, 0.1, 0.2, 0.4],
            /// [0.4, 0.3, 0.2, 0.5, 0.1]]
            /// ```
            ConcatEmbedding = 6,
        }
    }
    /// Metadata of the prediction output to be explained.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct OutputMetadata {
        /// Name of the output tensor. Required and is only applicable to Vertex
        /// AI provided images for Tensorflow.
        #[prost(string, tag = "3")]
        pub output_tensor_name: ::prost::alloc::string::String,
        /// Defines how to map \[Attribution.output_index][google.cloud.aiplatform.v1.Attribution.output_index\] to
        /// \[Attribution.output_display_name][google.cloud.aiplatform.v1.Attribution.output_display_name\].
        ///
        /// If neither of the fields are specified,
        /// \[Attribution.output_display_name][google.cloud.aiplatform.v1.Attribution.output_display_name\] will not be populated.
        #[prost(oneof = "output_metadata::DisplayNameMapping", tags = "1, 2")]
        pub display_name_mapping: ::core::option::Option<output_metadata::DisplayNameMapping>,
    }
    /// Nested message and enum types in `OutputMetadata`.
    pub mod output_metadata {
        /// Defines how to map \[Attribution.output_index][google.cloud.aiplatform.v1.Attribution.output_index\] to
        /// \[Attribution.output_display_name][google.cloud.aiplatform.v1.Attribution.output_display_name\].
        ///
        /// If neither of the fields are specified,
        /// \[Attribution.output_display_name][google.cloud.aiplatform.v1.Attribution.output_display_name\] will not be populated.
        #[derive(Clone, PartialEq, ::prost::Oneof)]
        pub enum DisplayNameMapping {
            /// Static mapping between the index and display name.
            ///
            /// Use this if the outputs are a deterministic n-dimensional array, e.g. a
            /// list of scores of all the classes in a pre-defined order for a
            /// multi-classification Model. It's not feasible if the outputs are
            /// non-deterministic, e.g. the Model produces top-k classes or sort the
            /// outputs by their values.
            ///
            /// The shape of the value must be an n-dimensional array of strings. The
            /// number of dimensions must match that of the outputs to be explained.
            /// The \[Attribution.output_display_name][google.cloud.aiplatform.v1.Attribution.output_display_name\] is populated by locating in the
            /// mapping with \[Attribution.output_index][google.cloud.aiplatform.v1.Attribution.output_index\].
            #[prost(message, tag = "1")]
            IndexDisplayNameMapping(::prost_types::Value),
            /// Specify a field name in the prediction to look for the display name.
            ///
            /// Use this if the prediction contains the display names for the outputs.
            ///
            /// The display names in the prediction must have the same shape of the
            /// outputs, so that it can be located by \[Attribution.output_index][google.cloud.aiplatform.v1.Attribution.output_index\] for
            /// a specific output.
            #[prost(string, tag = "2")]
            DisplayNameMappingKey(::prost::alloc::string::String),
        }
    }
}
/// The storage details for Avro input content.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AvroSource {
    /// Required. Google Cloud Storage location.
    #[prost(message, optional, tag = "1")]
    pub gcs_source: ::core::option::Option<GcsSource>,
}
/// The storage details for CSV input content.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CsvSource {
    /// Required. Google Cloud Storage location.
    #[prost(message, optional, tag = "1")]
    pub gcs_source: ::core::option::Option<GcsSource>,
}
/// The Google Cloud Storage location for the input content.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GcsSource {
    /// Required. Google Cloud Storage URI(-s) to the input file(s). May contain
    /// wildcards. For more information on wildcards, see
    /// <https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames>.
    #[prost(string, repeated, tag = "1")]
    pub uris: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// The Google Cloud Storage location where the output is to be written to.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GcsDestination {
    /// Required. Google Cloud Storage URI to output directory. If the URI doesn't end with
    /// '/', a '/' will be automatically appended. The directory is created if it
    /// doesn't exist.
    #[prost(string, tag = "1")]
    pub output_uri_prefix: ::prost::alloc::string::String,
}
/// The BigQuery location for the input content.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BigQuerySource {
    /// Required. BigQuery URI to a table, up to 2000 characters long.
    /// Accepted forms:
    ///
    /// * BigQuery path. For example: `bq://projectId.bqDatasetId.bqTableId`.
    #[prost(string, tag = "1")]
    pub input_uri: ::prost::alloc::string::String,
}
/// The BigQuery location for the output content.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BigQueryDestination {
    /// Required. BigQuery URI to a project or table, up to 2000 characters long.
    ///
    /// When only the project is specified, the Dataset and Table are created.
    /// When the full table reference is specified, the Dataset must exist and
    /// table must not exist.
    ///
    /// Accepted forms:
    ///
    /// * BigQuery path. For example:
    /// `bq://projectId` or `bq://projectId.bqDatasetId` or
    /// `bq://projectId.bqDatasetId.bqTableId`.
    #[prost(string, tag = "1")]
    pub output_uri: ::prost::alloc::string::String,
}
/// The storage details for CSV output content.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CsvDestination {
    /// Required. Google Cloud Storage location.
    #[prost(message, optional, tag = "1")]
    pub gcs_destination: ::core::option::Option<GcsDestination>,
}
/// The storage details for TFRecord output content.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TfRecordDestination {
    /// Required. Google Cloud Storage location.
    #[prost(message, optional, tag = "1")]
    pub gcs_destination: ::core::option::Option<GcsDestination>,
}
/// The Container Registry location for the container image.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ContainerRegistryDestination {
    /// Required. Container Registry URI of a container image.
    /// Only Google Container Registry and Artifact Registry are supported now.
    /// Accepted forms:
    ///
    /// * Google Container Registry path. For example:
    /// `gcr.io/projectId/imageName:tag`.
    ///
    /// * Artifact Registry path. For example:
    /// `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`.
    ///
    /// If a tag is not specified, "latest" will be used as the default tag.
    #[prost(string, tag = "1")]
    pub output_uri: ::prost::alloc::string::String,
}
/// Explanation of a prediction (provided in \[PredictResponse.predictions][google.cloud.aiplatform.v1.PredictResponse.predictions\])
/// produced by the Model on a given \[instance][google.cloud.aiplatform.v1.ExplainRequest.instances\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Explanation {
    /// Output only. Feature attributions grouped by predicted outputs.
    ///
    /// For Models that predict only one output, such as regression Models that
    /// predict only one score, there is only one attribution that explains the
    /// predicted output. For Models that predict multiple outputs, such as
    /// multiclass Models that predict multiple classes, each element explains one
    /// specific item. \[Attribution.output_index][google.cloud.aiplatform.v1.Attribution.output_index\] can be used to identify which
    /// output this attribution is explaining.
    ///
    /// If users set \[ExplanationParameters.top_k][google.cloud.aiplatform.v1.ExplanationParameters.top_k\], the attributions are sorted
    /// by \[instance_output_value][Attribution.instance_output_value\] in
    /// descending order. If \[ExplanationParameters.output_indices][google.cloud.aiplatform.v1.ExplanationParameters.output_indices\] is specified,
    /// the attributions are stored by \[Attribution.output_index][google.cloud.aiplatform.v1.Attribution.output_index\] in the same
    /// order as they appear in the output_indices.
    #[prost(message, repeated, tag = "1")]
    pub attributions: ::prost::alloc::vec::Vec<Attribution>,
}
/// Aggregated explanation metrics for a Model over a set of instances.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelExplanation {
    /// Output only. Aggregated attributions explaining the Model's prediction outputs over the
    /// set of instances. The attributions are grouped by outputs.
    ///
    /// For Models that predict only one output, such as regression Models that
    /// predict only one score, there is only one attribution that explains the
    /// predicted output. For Models that predict multiple outputs, such as
    /// multiclass Models that predict multiple classes, each element explains one
    /// specific item. \[Attribution.output_index][google.cloud.aiplatform.v1.Attribution.output_index\] can be used to identify which
    /// output this attribution is explaining.
    ///
    /// The \[baselineOutputValue][google.cloud.aiplatform.v1.Attribution.baseline_output_value\],
    /// \[instanceOutputValue][google.cloud.aiplatform.v1.Attribution.instance_output_value\] and
    /// \[featureAttributions][google.cloud.aiplatform.v1.Attribution.feature_attributions\] fields are
    /// averaged over the test data.
    ///
    /// NOTE: Currently AutoML tabular classification Models produce only one
    /// attribution, which averages attributions over all the classes it predicts.
    /// \[Attribution.approximation_error][google.cloud.aiplatform.v1.Attribution.approximation_error\] is not populated.
    #[prost(message, repeated, tag = "1")]
    pub mean_attributions: ::prost::alloc::vec::Vec<Attribution>,
}
/// Attribution that explains a particular prediction output.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Attribution {
    /// Output only. Model predicted output if the input instance is constructed from the
    /// baselines of all the features defined in \[ExplanationMetadata.inputs][google.cloud.aiplatform.v1.ExplanationMetadata.inputs\].
    /// The field name of the output is determined by the key in
    /// \[ExplanationMetadata.outputs][google.cloud.aiplatform.v1.ExplanationMetadata.outputs\].
    ///
    /// If the Model's predicted output has multiple dimensions (rank > 1), this is
    /// the value in the output located by \[output_index][google.cloud.aiplatform.v1.Attribution.output_index\].
    ///
    /// If there are multiple baselines, their output values are averaged.
    #[prost(double, tag = "1")]
    pub baseline_output_value: f64,
    /// Output only. Model predicted output on the corresponding [explanation
    /// instance]\[ExplainRequest.instances\]. The field name of the output is
    /// determined by the key in \[ExplanationMetadata.outputs][google.cloud.aiplatform.v1.ExplanationMetadata.outputs\].
    ///
    /// If the Model predicted output has multiple dimensions, this is the value in
    /// the output located by \[output_index][google.cloud.aiplatform.v1.Attribution.output_index\].
    #[prost(double, tag = "2")]
    pub instance_output_value: f64,
    /// Output only. Attributions of each explained feature. Features are extracted from
    /// the [prediction instances]\[google.cloud.aiplatform.v1.ExplainRequest.instances\] according to
    /// [explanation metadata for inputs]\[google.cloud.aiplatform.v1.ExplanationMetadata.inputs\].
    ///
    /// The value is a struct, whose keys are the name of the feature. The values
    /// are how much the feature in the \[instance][google.cloud.aiplatform.v1.ExplainRequest.instances\]
    /// contributed to the predicted result.
    ///
    /// The format of the value is determined by the feature's input format:
    ///
    ///   * If the feature is a scalar value, the attribution value is a
    ///     [floating number]\[google.protobuf.Value.number_value\].
    ///
    ///   * If the feature is an array of scalar values, the attribution value is
    ///     an \[array][google.protobuf.Value.list_value\].
    ///
    ///   * If the feature is a struct, the attribution value is a
    ///     \[struct][google.protobuf.Value.struct_value\]. The keys in the
    ///     attribution value struct are the same as the keys in the feature
    ///     struct. The formats of the values in the attribution struct are
    ///     determined by the formats of the values in the feature struct.
    ///
    /// The \[ExplanationMetadata.feature_attributions_schema_uri][google.cloud.aiplatform.v1.ExplanationMetadata.feature_attributions_schema_uri\] field,
    /// pointed to by the \[ExplanationSpec][google.cloud.aiplatform.v1.ExplanationSpec\] field of the
    /// \[Endpoint.deployed_models][google.cloud.aiplatform.v1.Endpoint.deployed_models\] object, points to the schema file that
    /// describes the features and their attribution values (if it is populated).
    #[prost(message, optional, tag = "3")]
    pub feature_attributions: ::core::option::Option<::prost_types::Value>,
    /// Output only. The index that locates the explained prediction output.
    ///
    /// If the prediction output is a scalar value, output_index is not populated.
    /// If the prediction output has multiple dimensions, the length of the
    /// output_index list is the same as the number of dimensions of the output.
    /// The i-th element in output_index is the element index of the i-th dimension
    /// of the output vector. Indices start from 0.
    #[prost(int32, repeated, packed = "false", tag = "4")]
    pub output_index: ::prost::alloc::vec::Vec<i32>,
    /// Output only. The display name of the output identified by \[output_index][google.cloud.aiplatform.v1.Attribution.output_index\]. For example,
    /// the predicted class name by a multi-classification Model.
    ///
    /// This field is only populated iff the Model predicts display names as a
    /// separate field along with the explained output. The predicted display name
    /// must have the same shape as the explained output, and can be located using
    /// output_index.
    #[prost(string, tag = "5")]
    pub output_display_name: ::prost::alloc::string::String,
    /// Output only. Error of \[feature_attributions][google.cloud.aiplatform.v1.Attribution.feature_attributions\] caused by approximation used in the
    /// explanation method. Lower value means more precise attributions.
    ///
    /// * For Sampled Shapley
    /// \[attribution][google.cloud.aiplatform.v1.ExplanationParameters.sampled_shapley_attribution\],
    /// increasing \[path_count][google.cloud.aiplatform.v1.SampledShapleyAttribution.path_count\] might reduce
    /// the error.
    /// * For Integrated Gradients
    /// \[attribution][google.cloud.aiplatform.v1.ExplanationParameters.integrated_gradients_attribution\],
    /// increasing \[step_count][google.cloud.aiplatform.v1.IntegratedGradientsAttribution.step_count\] might
    /// reduce the error.
    /// * For [XRAI attribution]\[google.cloud.aiplatform.v1.ExplanationParameters.xrai_attribution\],
    /// increasing
    /// \[step_count][google.cloud.aiplatform.v1.XraiAttribution.step_count\] might reduce the error.
    ///
    /// See [this introduction](/vertex-ai/docs/explainable-ai/overview)
    /// for more information.
    #[prost(double, tag = "6")]
    pub approximation_error: f64,
    /// Output only. Name of the explain output. Specified as the key in
    /// \[ExplanationMetadata.outputs][google.cloud.aiplatform.v1.ExplanationMetadata.outputs\].
    #[prost(string, tag = "7")]
    pub output_name: ::prost::alloc::string::String,
}
/// Specification of Model explanation.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExplanationSpec {
    /// Required. Parameters that configure the explanation of the Model's predictions.
    #[prost(message, optional, tag = "1")]
    pub parameters: ::core::option::Option<ExplanationParameters>,
    /// Required. Metadata describing the Model's input and output for explanation.
    #[prost(message, optional, tag = "2")]
    pub metadata: ::core::option::Option<ExplanationMetadata>,
}
/// Parameters to configure explaining for Model's predictions.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExplanationParameters {
    /// If populated, returns attributions for top K indices of outputs
    /// (defaults to 1). Only applies to Models that predict more than one output
    /// (e.g. multi-class Models). When set to -1, returns explanations for all
    /// outputs.
    #[prost(int32, tag = "4")]
    pub top_k: i32,
    /// If populated, only returns attributions that have
    /// \[output_index][google.cloud.aiplatform.v1.Attribution.output_index\] contained in output_indices. It
    /// must be an ndarray of integers, with the same shape of the output it's
    /// explaining.
    ///
    /// If not populated, returns attributions for \[top_k][google.cloud.aiplatform.v1.ExplanationParameters.top_k\] indices of outputs.
    /// If neither top_k nor output_indices is populated, returns the argmax
    /// index of the outputs.
    ///
    /// Only applicable to Models that predict multiple outputs (e.g. multi-class
    /// Models that predict multiple classes).
    #[prost(message, optional, tag = "5")]
    pub output_indices: ::core::option::Option<::prost_types::ListValue>,
    #[prost(oneof = "explanation_parameters::Method", tags = "1, 2, 3")]
    pub method: ::core::option::Option<explanation_parameters::Method>,
}
/// Nested message and enum types in `ExplanationParameters`.
pub mod explanation_parameters {
    /// The attribution method used to explain the Model's predictions.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Method {
        /// An attribution method that approximates Shapley values for features that
        /// contribute to the label being predicted. A sampling strategy is used to
        /// approximate the value rather than considering all subsets of features.
        /// Refer to this paper for model details: <https://arxiv.org/abs/1306.4265>.
        #[prost(message, tag = "1")]
        SampledShapleyAttribution(super::SampledShapleyAttribution),
        /// An attribution method that computes Aumann-Shapley values taking
        /// advantage of the model's fully differentiable structure. Refer to this
        /// paper for more details: <https://arxiv.org/abs/1703.01365>
        #[prost(message, tag = "2")]
        IntegratedGradientsAttribution(super::IntegratedGradientsAttribution),
        /// An attribution method that redistributes Integrated Gradients
        /// attribution to segmented regions, taking advantage of the model's fully
        /// differentiable structure. Refer to this paper for
        /// more details: <https://arxiv.org/abs/1906.02825>
        ///
        /// XRAI currently performs better on natural images, like a picture of a
        /// house or an animal. If the images are taken in artificial environments,
        /// like a lab or manufacturing line, or from diagnostic equipment, like
        /// x-rays or quality-control cameras, use Integrated Gradients instead.
        #[prost(message, tag = "3")]
        XraiAttribution(super::XraiAttribution),
    }
}
/// An attribution method that approximates Shapley values for features that
/// contribute to the label being predicted. A sampling strategy is used to
/// approximate the value rather than considering all subsets of features.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SampledShapleyAttribution {
    /// Required. The number of feature permutations to consider when approximating the
    /// Shapley values.
    ///
    /// Valid range of its value is [1, 50], inclusive.
    #[prost(int32, tag = "1")]
    pub path_count: i32,
}
/// An attribution method that computes the Aumann-Shapley value taking advantage
/// of the model's fully differentiable structure. Refer to this paper for
/// more details: <https://arxiv.org/abs/1703.01365>
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct IntegratedGradientsAttribution {
    /// Required. The number of steps for approximating the path integral.
    /// A good value to start is 50 and gradually increase until the
    /// sum to diff property is within the desired error range.
    ///
    /// Valid range of its value is [1, 100], inclusive.
    #[prost(int32, tag = "1")]
    pub step_count: i32,
    /// Config for SmoothGrad approximation of gradients.
    ///
    /// When enabled, the gradients are approximated by averaging the gradients
    /// from noisy samples in the vicinity of the inputs. Adding
    /// noise can help improve the computed gradients. Refer to this paper for more
    /// details: <https://arxiv.org/pdf/1706.03825.pdf>
    #[prost(message, optional, tag = "2")]
    pub smooth_grad_config: ::core::option::Option<SmoothGradConfig>,
}
/// An explanation method that redistributes Integrated Gradients
/// attributions to segmented regions, taking advantage of the model's fully
/// differentiable structure. Refer to this paper for more details:
/// <https://arxiv.org/abs/1906.02825>
///
/// Supported only by image Models.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct XraiAttribution {
    /// Required. The number of steps for approximating the path integral.
    /// A good value to start is 50 and gradually increase until the
    /// sum to diff property is met within the desired error range.
    ///
    /// Valid range of its value is [1, 100], inclusive.
    #[prost(int32, tag = "1")]
    pub step_count: i32,
    /// Config for SmoothGrad approximation of gradients.
    ///
    /// When enabled, the gradients are approximated by averaging the gradients
    /// from noisy samples in the vicinity of the inputs. Adding
    /// noise can help improve the computed gradients. Refer to this paper for more
    /// details: <https://arxiv.org/pdf/1706.03825.pdf>
    #[prost(message, optional, tag = "2")]
    pub smooth_grad_config: ::core::option::Option<SmoothGradConfig>,
}
/// Config for SmoothGrad approximation of gradients.
///
/// When enabled, the gradients are approximated by averaging the gradients from
/// noisy samples in the vicinity of the inputs. Adding noise can help improve
/// the computed gradients. Refer to this paper for more details:
/// <https://arxiv.org/pdf/1706.03825.pdf>
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SmoothGradConfig {
    /// The number of gradient samples to use for
    /// approximation. The higher this number, the more accurate the gradient
    /// is, but the runtime complexity increases by this factor as well.
    /// Valid range of its value is [1, 50]. Defaults to 3.
    #[prost(int32, tag = "3")]
    pub noisy_sample_count: i32,
    /// Represents the standard deviation of the Gaussian kernel
    /// that will be used to add noise to the interpolated inputs
    /// prior to computing gradients.
    #[prost(oneof = "smooth_grad_config::GradientNoiseSigma", tags = "1, 2")]
    pub gradient_noise_sigma: ::core::option::Option<smooth_grad_config::GradientNoiseSigma>,
}
/// Nested message and enum types in `SmoothGradConfig`.
pub mod smooth_grad_config {
    /// Represents the standard deviation of the Gaussian kernel
    /// that will be used to add noise to the interpolated inputs
    /// prior to computing gradients.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum GradientNoiseSigma {
        /// This is a single float value and will be used to add noise to all the
        /// features. Use this field when all features are normalized to have the
        /// same distribution: scale to range [0, 1], [-1, 1] or z-scoring, where
        /// features are normalized to have 0-mean and 1-variance. Learn more about
        /// \[normalization\](<https://developers.google.com/machine-learning/data-prep/transform/normalization>).
        ///
        /// For best results the recommended value is about 10% - 20% of the standard
        /// deviation of the input feature. Refer to section 3.2 of the SmoothGrad
        /// paper: <https://arxiv.org/pdf/1706.03825.pdf>. Defaults to 0.1.
        ///
        /// If the distribution is different per feature, set
        /// \[feature_noise_sigma][google.cloud.aiplatform.v1.SmoothGradConfig.feature_noise_sigma\] instead
        /// for each feature.
        #[prost(float, tag = "1")]
        NoiseSigma(f32),
        /// This is similar to \[noise_sigma][google.cloud.aiplatform.v1.SmoothGradConfig.noise_sigma\], but
        /// provides additional flexibility. A separate noise sigma can be provided
        /// for each feature, which is useful if their distributions are different.
        /// No noise is added to features that are not set. If this field is unset,
        /// \[noise_sigma][google.cloud.aiplatform.v1.SmoothGradConfig.noise_sigma\] will be used for all
        /// features.
        #[prost(message, tag = "2")]
        FeatureNoiseSigma(super::FeatureNoiseSigma),
    }
}
/// Noise sigma by features. Noise sigma represents the standard deviation of the
/// Gaussian kernel that will be used to add noise to interpolated inputs prior
/// to computing gradients.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FeatureNoiseSigma {
    /// Noise sigma per feature. No noise is added to features that are not set.
    #[prost(message, repeated, tag = "1")]
    pub noise_sigma: ::prost::alloc::vec::Vec<feature_noise_sigma::NoiseSigmaForFeature>,
}
/// Nested message and enum types in `FeatureNoiseSigma`.
pub mod feature_noise_sigma {
    /// Noise sigma for a single feature.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct NoiseSigmaForFeature {
        /// The name of the input feature for which noise sigma is provided. The
        /// features are defined in
        /// [explanation metadata inputs]\[google.cloud.aiplatform.v1.ExplanationMetadata.inputs\].
        #[prost(string, tag = "1")]
        pub name: ::prost::alloc::string::String,
        /// This represents the standard deviation of the Gaussian kernel that will
        /// be used to add noise to the feature prior to computing gradients. Similar
        /// to \[noise_sigma][google.cloud.aiplatform.v1.SmoothGradConfig.noise_sigma\] but represents the
        /// noise added to the current feature. Defaults to 0.1.
        #[prost(float, tag = "2")]
        pub sigma: f32,
    }
}
/// The \[ExplanationSpec][google.cloud.aiplatform.v1.ExplanationSpec\] entries that can be overridden at
/// [online explanation]\[google.cloud.aiplatform.v1.PredictionService.Explain\] time.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExplanationSpecOverride {
/// The parameters to be overridden. Note that the
/// \[method][google.cloud.aiplatform.v1.ExplanationParameters.method\] cannot be changed. If not specified,
/// no parameter is overridden.
#[prost(message, optional, tag = "1")]
pub parameters: ::core::option::Option<ExplanationParameters>,
/// The metadata to be overridden. If not specified, no metadata is overridden.
#[prost(message, optional, tag = "2")]
pub metadata: ::core::option::Option<ExplanationMetadataOverride>,
}
/// The \[ExplanationMetadata][google.cloud.aiplatform.v1.ExplanationMetadata\] entries that can be overridden at
/// [online explanation]\[google.cloud.aiplatform.v1.PredictionService.Explain\] time.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExplanationMetadataOverride {
/// Required. Overrides the [input metadata]\[google.cloud.aiplatform.v1.ExplanationMetadata.inputs\] of the features.
/// The key is the name of the feature to be overridden. The keys specified
/// here must exist in the input metadata to be overridden. If a feature is
/// not specified here, the corresponding feature's input metadata is not
/// overridden.
#[prost(map = "string, message", tag = "1")]
pub inputs: ::std::collections::HashMap<
::prost::alloc::string::String,
explanation_metadata_override::InputMetadataOverride,
>,
}
/// Nested message and enum types in `ExplanationMetadataOverride`.
pub mod explanation_metadata_override {
/// The [input metadata]\[google.cloud.aiplatform.v1.ExplanationMetadata.InputMetadata\] entries to be
/// overridden.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct InputMetadataOverride {
/// Baseline inputs for this feature.
///
/// This overrides the `input_baseline` field of the
/// \[ExplanationMetadata.InputMetadata][google.cloud.aiplatform.v1.ExplanationMetadata.InputMetadata\]
/// object of the corresponding feature's input metadata. If it's not
/// specified, the original baselines are not overridden.
#[prost(message, repeated, tag = "1")]
pub input_baselines: ::prost::alloc::vec::Vec<::prost_types::Value>,
}
}
/// Describes the state of a job.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum JobState {
    /// The job state is unspecified.
    Unspecified = 0,
    /// The job has been just created or resumed and processing has not yet begun.
    Queued = 1,
    /// The service is preparing to run the job.
    Pending = 2,
    /// The job is in progress.
    Running = 3,
    /// The job completed successfully.
    Succeeded = 4,
    /// The job failed.
    Failed = 5,
    /// The job is being cancelled. From this state the job may only go to
    /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or `JOB_STATE_CANCELLED`.
    Cancelling = 6,
    /// The job has been cancelled.
    Cancelled = 7,
    /// The job has been stopped, and can be resumed.
    Paused = 8,
    /// The job has expired.
    Expired = 9,
}
/// Specification of a single machine.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MachineSpec {
    /// Immutable. The type of the machine.
    ///
    /// See the [list of machine types supported for
    /// prediction](<https://cloud.google.com/vertex-ai/docs/predictions/configure-compute#machine-types>).
    ///
    /// See the [list of machine types supported for custom
    /// training](<https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types>).
    ///
    /// For \[DeployedModel][google.cloud.aiplatform.v1.DeployedModel\] this field is optional, and the default
    /// value is `n1-standard-2`. For \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\] or as part of
    /// \[WorkerPoolSpec][google.cloud.aiplatform.v1.WorkerPoolSpec\] this field is required.
    #[prost(string, tag = "1")]
    pub machine_type: ::prost::alloc::string::String,
    /// Immutable. The type of accelerator(s) that may be attached to the machine as per
    /// \[accelerator_count][google.cloud.aiplatform.v1.MachineSpec.accelerator_count\].
    #[prost(enumeration = "AcceleratorType", tag = "2")]
    pub accelerator_type: i32,
    /// The number of accelerators to attach to the machine.
    #[prost(int32, tag = "3")]
    pub accelerator_count: i32,
}
/// A description of resources that are dedicated to a DeployedModel, and
/// that need a higher degree of manual configuration.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DedicatedResources {
    /// Required. Immutable. The specification of a single machine used by the prediction.
    #[prost(message, optional, tag = "1")]
    pub machine_spec: ::core::option::Option<MachineSpec>,
    /// Required. Immutable. The minimum number of machine replicas this DeployedModel will be always
    /// deployed on. This value must be greater than or equal to 1.
    ///
    /// If traffic against the DeployedModel increases, it may dynamically be
    /// deployed onto more replicas, and as traffic decreases, some of these extra
    /// replicas may be freed.
    #[prost(int32, tag = "2")]
    pub min_replica_count: i32,
    /// Immutable. The maximum number of replicas this DeployedModel may be deployed on when
    /// the traffic against it increases. If the requested value is too large,
    /// the deployment will error, but if deployment succeeds then the ability
    /// to scale the model to that many replicas is guaranteed (barring service
    /// outages). If traffic against the DeployedModel increases beyond what its
    /// replicas at maximum may handle, a portion of the traffic will be dropped.
    /// If this value is not provided, will use \[min_replica_count][google.cloud.aiplatform.v1.DedicatedResources.min_replica_count\] as the
    /// default value.
    #[prost(int32, tag = "3")]
    pub max_replica_count: i32,
    /// Immutable. The metric specifications that overrides a resource
    /// utilization metric (CPU utilization, accelerator's duty cycle, and so on)
    /// target value (defaults to 60 if not set). At most one entry is allowed per
    /// metric.
    ///
    /// If \[machine_spec.accelerator_count][google.cloud.aiplatform.v1.MachineSpec.accelerator_count\] is
    /// above 0, the autoscaling will be based on both CPU utilization and
    /// accelerator's duty cycle metrics and scale up when either metrics exceeds
    /// its target value while scale down if both metrics are under their target
    /// value. The default target value is 60 for both metrics.
    ///
    /// If \[machine_spec.accelerator_count][google.cloud.aiplatform.v1.MachineSpec.accelerator_count\] is
    /// 0, the autoscaling will be based on CPU utilization metric only with
    /// default target value 60 if not explicitly set.
    ///
    /// For example, in the case of Online Prediction, if you want to override
    /// target CPU utilization to 80, you should set
    /// \[autoscaling_metric_specs.metric_name][google.cloud.aiplatform.v1.AutoscalingMetricSpec.metric_name\]
    /// to `aiplatform.googleapis.com/prediction/online/cpu/utilization` and
    /// \[autoscaling_metric_specs.target][google.cloud.aiplatform.v1.AutoscalingMetricSpec.target\] to `80`.
    #[prost(message, repeated, tag = "4")]
    pub autoscaling_metric_specs: ::prost::alloc::vec::Vec<AutoscalingMetricSpec>,
}
/// A description of resources that to large degree are decided by Vertex AI,
/// and require only a modest additional configuration.
/// Each Model supporting these resources documents its specific guidelines.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AutomaticResources {
    /// Immutable. The minimum number of replicas this DeployedModel will be always deployed
    /// on. If traffic against it increases, it may dynamically be deployed onto
    /// more replicas up to \[max_replica_count][google.cloud.aiplatform.v1.AutomaticResources.max_replica_count\], and as traffic decreases, some
    /// of these extra replicas may be freed.
    /// If the requested value is too large, the deployment will error.
    #[prost(int32, tag = "1")]
    pub min_replica_count: i32,
    /// Immutable. The maximum number of replicas this DeployedModel may be deployed on when
    /// the traffic against it increases. If the requested value is too large,
    /// the deployment will error, but if deployment succeeds then the ability
    /// to scale the model to that many replicas is guaranteed (barring service
    /// outages). If traffic against the DeployedModel increases beyond what its
    /// replicas at maximum may handle, a portion of the traffic will be dropped.
    /// If this value is not provided, no upper bound for scaling under heavy
    /// traffic will be assumed, though Vertex AI may be unable to scale beyond
    /// a certain replica number.
    #[prost(int32, tag = "2")]
    pub max_replica_count: i32,
}
/// A description of resources that are used for performing batch operations, are
/// dedicated to a Model, and need manual configuration.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchDedicatedResources {
    /// Required. Immutable. The specification of a single machine.
    #[prost(message, optional, tag = "1")]
    pub machine_spec: ::core::option::Option<MachineSpec>,
    /// Immutable. The number of machine replicas used at the start of the batch operation.
    /// If not set, Vertex AI decides starting number, not greater than
    /// \[max_replica_count][google.cloud.aiplatform.v1.BatchDedicatedResources.max_replica_count\].
    #[prost(int32, tag = "2")]
    pub starting_replica_count: i32,
    /// Immutable. The maximum number of machine replicas the batch operation may be scaled
    /// to. The default value is 10.
    #[prost(int32, tag = "3")]
    pub max_replica_count: i32,
}
/// Statistics information about resource consumption.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ResourcesConsumed {
    /// Output only. The number of replica hours used. Note that many replicas may run in
    /// parallel, and additionally any given work may be queued for some time.
    /// Therefore this value is not strictly related to wall-clock time.
    #[prost(double, tag = "1")]
    pub replica_hours: f64,
}
/// Represents the spec of disk options.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DiskSpec {
    /// Type of the boot disk (default is "pd-ssd").
    /// Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or
    /// "pd-standard" (Persistent Disk Hard Disk Drive).
    #[prost(string, tag = "1")]
    pub boot_disk_type: ::prost::alloc::string::String,
    /// Size in GB of the boot disk (default is 100 GB).
    #[prost(int32, tag = "2")]
    pub boot_disk_size_gb: i32,
}
/// The metric specification that defines the target resource utilization
/// (CPU utilization, accelerator's duty cycle, and so on) for calculating the
/// desired replica count.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AutoscalingMetricSpec {
    /// Required. The resource metric name.
    /// Supported metrics:
    ///
    /// * For Online Prediction:
    /// * `aiplatform.googleapis.com/prediction/online/accelerator/duty_cycle`
    /// * `aiplatform.googleapis.com/prediction/online/cpu/utilization`
    #[prost(string, tag = "1")]
    pub metric_name: ::prost::alloc::string::String,
    /// The target resource utilization in percentage (1% - 100%) for the given
    /// metric; once the real usage deviates from the target by a certain
    /// percentage, the number of machine replicas changes. The default value is 60
    /// (representing 60%) if not provided.
    #[prost(int32, tag = "2")]
    pub target: i32,
}
/// Manual batch tuning parameters.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ManualBatchTuningParameters {
    /// Immutable. The number of the records (e.g. instances) of the operation given in
    /// each batch to a machine replica. Machine type, and size of a single
    /// record should be considered when setting this parameter, a higher value
    /// speeds up the batch operation's execution, but too high a value will result
    /// in a whole batch not fitting in a machine's memory, and the whole
    /// operation will fail.
    /// The default value is 4.
    #[prost(int32, tag = "1")]
    pub batch_size: i32,
}
/// Next ID: 6
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelMonitoringObjectiveConfig {
    /// Training dataset for models. This field has to be set only if
    /// TrainingPredictionSkewDetectionConfig is specified.
    #[prost(message, optional, tag = "1")]
    pub training_dataset:
        ::core::option::Option<model_monitoring_objective_config::TrainingDataset>,
    /// The config for skew between training data and prediction data.
    #[prost(message, optional, tag = "2")]
    pub training_prediction_skew_detection_config: ::core::option::Option<
        model_monitoring_objective_config::TrainingPredictionSkewDetectionConfig,
    >,
    /// The config for drift of prediction data.
    #[prost(message, optional, tag = "3")]
    pub prediction_drift_detection_config:
        ::core::option::Option<model_monitoring_objective_config::PredictionDriftDetectionConfig>,
    /// The config for integration with Explainable AI.
    #[prost(message, optional, tag = "5")]
    pub explanation_config:
        ::core::option::Option<model_monitoring_objective_config::ExplanationConfig>,
}
/// Nested message and enum types in `ModelMonitoringObjectiveConfig`.
pub mod model_monitoring_objective_config {
    /// Training Dataset information.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct TrainingDataset {
        /// Data format of the dataset, only applicable if the input is from
        /// Google Cloud Storage.
        /// The possible formats are:
        ///
        /// "tf-record"
        /// The source file is a TFRecord file.
        ///
        /// "csv"
        /// The source file is a CSV file.
        #[prost(string, tag = "2")]
        pub data_format: ::prost::alloc::string::String,
        /// The target field name the model is to predict.
        /// This field will be excluded when doing Predict and (or) Explain for the
        /// training data.
        #[prost(string, tag = "6")]
        pub target_field: ::prost::alloc::string::String,
        /// Strategy to sample data from Training Dataset.
        /// If not set, we process the whole dataset.
        #[prost(message, optional, tag = "7")]
        pub logging_sampling_strategy: ::core::option::Option<super::SamplingStrategy>,
        /// The source of the training dataset: a managed Dataset, a Cloud Storage
        /// location, or a BigQuery table.
        #[prost(oneof = "training_dataset::DataSource", tags = "3, 4, 5")]
        pub data_source: ::core::option::Option<training_dataset::DataSource>,
    }
    /// Nested message and enum types in `TrainingDataset`.
    pub mod training_dataset {
        /// The source of the training dataset.
        #[derive(Clone, PartialEq, ::prost::Oneof)]
        pub enum DataSource {
            /// The resource name of the Dataset used to train this Model.
            #[prost(string, tag = "3")]
            Dataset(::prost::alloc::string::String),
            /// The Google Cloud Storage uri of the unmanaged Dataset used to train
            /// this Model.
            #[prost(message, tag = "4")]
            GcsSource(super::super::GcsSource),
            /// The BigQuery table of the unmanaged Dataset used to train this
            /// Model.
            #[prost(message, tag = "5")]
            BigquerySource(super::super::BigQuerySource),
        }
    }
    /// The config for Training & Prediction data skew detection. It specifies the
    /// training dataset sources and the skew detection parameters.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct TrainingPredictionSkewDetectionConfig {
        /// Key is the feature name and value is the threshold. If a feature needs to
        /// be monitored for skew, a value threshold must be configured for that
        /// feature. The threshold here is against feature distribution distance
        /// between the training and prediction feature.
        #[prost(map = "string, message", tag = "1")]
        pub skew_thresholds:
            ::std::collections::HashMap<::prost::alloc::string::String, super::ThresholdConfig>,
        /// Key is the feature name and value is the threshold. The threshold here is
        /// against attribution score distance between the training and prediction
        /// feature.
        #[prost(map = "string, message", tag = "2")]
        pub attribution_score_skew_thresholds:
            ::std::collections::HashMap<::prost::alloc::string::String, super::ThresholdConfig>,
    }
    /// The config for Prediction data drift detection.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct PredictionDriftDetectionConfig {
        /// Key is the feature name and value is the threshold. If a feature needs to
        /// be monitored for drift, a value threshold must be configured for that
        /// feature. The threshold here is against feature distribution distance
        /// between different time windows.
        #[prost(map = "string, message", tag = "1")]
        pub drift_thresholds:
            ::std::collections::HashMap<::prost::alloc::string::String, super::ThresholdConfig>,
        /// Key is the feature name and value is the threshold. The threshold here is
        /// against attribution score distance between different time windows.
        #[prost(map = "string, message", tag = "2")]
        pub attribution_score_drift_thresholds:
            ::std::collections::HashMap<::prost::alloc::string::String, super::ThresholdConfig>,
    }
    /// The config for integration with Explainable AI. Only applicable if the Model
    /// has explanation_spec populated.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct ExplanationConfig {
        /// Whether to analyze the Explainable AI feature attribute scores.
        /// If set to true, Vertex AI will log the feature attributions from
        /// explain response and do the skew/drift detection for them.
        #[prost(bool, tag = "1")]
        pub enable_feature_attributes: bool,
        /// Predictions generated by the BatchPredictionJob using the baseline dataset.
        #[prost(message, optional, tag = "2")]
        pub explanation_baseline: ::core::option::Option<explanation_config::ExplanationBaseline>,
    }
    /// Nested message and enum types in `ExplanationConfig`.
    pub mod explanation_config {
        /// Output from \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\] for Model Monitoring baseline dataset,
        /// which can be used to generate baseline attribution scores.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct ExplanationBaseline {
            /// The storage format of the predictions generated by the BatchPrediction job.
            #[prost(enumeration = "explanation_baseline::PredictionFormat", tag = "1")]
            pub prediction_format: i32,
            /// The configuration specifying the BatchExplain job output. This can be
            /// used to generate the baseline of feature attribution scores.
            #[prost(oneof = "explanation_baseline::Destination", tags = "2, 3")]
            pub destination: ::core::option::Option<explanation_baseline::Destination>,
        }
        /// Nested message and enum types in `ExplanationBaseline`.
        pub mod explanation_baseline {
            /// The storage format of the predictions generated by the BatchPrediction job.
            #[derive(
                Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
            )]
            #[repr(i32)]
            pub enum PredictionFormat {
                /// Should not be set.
                Unspecified = 0,
                /// Predictions are in JSONL files.
                Jsonl = 2,
                /// Predictions are in BigQuery.
                Bigquery = 3,
            }
            /// The configuration specifying the BatchExplain job output. This can be
            /// used to generate the baseline of feature attribution scores.
            #[derive(Clone, PartialEq, ::prost::Oneof)]
            pub enum Destination {
                /// Cloud Storage location for BatchExplain output.
                #[prost(message, tag = "2")]
                Gcs(super::super::super::GcsDestination),
                /// BigQuery location for BatchExplain output.
                #[prost(message, tag = "3")]
                Bigquery(super::super::super::BigQueryDestination),
            }
        }
    }
}
/// Next ID: 2
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelMonitoringAlertConfig {
    /// The alert configuration; the only variant currently defined is an email
    /// alert (tag 1).
    #[prost(oneof = "model_monitoring_alert_config::Alert", tags = "1")]
    pub alert: ::core::option::Option<model_monitoring_alert_config::Alert>,
}
/// Nested message and enum types in `ModelMonitoringAlertConfig`.
pub mod model_monitoring_alert_config {
    /// The config for email alert.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct EmailAlertConfig {
        /// The email addresses to send the alert to.
        #[prost(string, repeated, tag = "1")]
        pub user_emails: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    }
    /// The alert delivery mechanism.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Alert {
        /// Email alert config.
        #[prost(message, tag = "1")]
        EmailAlertConfig(EmailAlertConfig),
    }
}
/// The config for feature monitoring threshold.
/// Next ID: 3
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ThresholdConfig {
    /// The threshold to apply; the only variant currently defined is a plain
    /// numeric value threshold (tag 1).
    #[prost(oneof = "threshold_config::Threshold", tags = "1")]
    pub threshold: ::core::option::Option<threshold_config::Threshold>,
}
/// Nested message and enum types in `ThresholdConfig`.
pub mod threshold_config {
    /// The kind of threshold being configured.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Threshold {
        /// Specify a threshold value that can trigger the alert.
        /// If this threshold config is for feature distribution distance:
        /// 1. For categorical feature, the distribution distance is calculated by
        /// L-infinity norm.
        /// 2. For numerical feature, the distribution distance is calculated by
        /// Jensen–Shannon divergence.
        /// Each feature must have a non-zero threshold if they need to be monitored.
        /// Otherwise no alert will be triggered for that feature.
        #[prost(double, tag = "1")]
        Value(f64),
    }
}
/// Sampling strategy for logging; it can be used for both the training and
/// prediction datasets.
/// Next ID: 2
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SamplingStrategy {
    /// Random sample config. Will support more sampling strategies later.
    #[prost(message, optional, tag = "1")]
    pub random_sample_config: ::core::option::Option<sampling_strategy::RandomSampleConfig>,
}
/// Nested message and enum types in `SamplingStrategy`.
pub mod sampling_strategy {
    /// Requests are randomly selected.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct RandomSampleConfig {
        /// Sample rate in the half-open interval (0, 1].
        #[prost(double, tag = "1")]
        pub sample_rate: f64,
    }
}
/// A job that uses a \[Model][google.cloud.aiplatform.v1.BatchPredictionJob.model\] to produce predictions
/// on multiple [input instances]\[google.cloud.aiplatform.v1.BatchPredictionJob.input_config\]. If
/// predictions for a significant portion of the instances fail, the job may finish
/// without attempting predictions for all remaining instances.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchPredictionJob {
    /// Output only. Resource name of the BatchPredictionJob.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The user-defined name of this BatchPredictionJob.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Required. The name of the Model that produces the predictions via this job,
    /// must share the same ancestor Location.
    /// Starting this job has no impact on any existing deployments of the Model
    /// and their resources.
    #[prost(string, tag = "3")]
    pub model: ::prost::alloc::string::String,
    /// Required. Input configuration of the instances on which predictions are performed.
    /// The schema of any single instance may be specified via
    /// the \[Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model\]
    /// \[PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata\]
    /// \[instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\].
    #[prost(message, optional, tag = "4")]
    pub input_config: ::core::option::Option<batch_prediction_job::InputConfig>,
    /// The parameters that govern the predictions. The schema of the parameters
    /// may be specified via the \[Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model\]
    /// \[PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata\]
    /// \[parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri\].
    #[prost(message, optional, tag = "5")]
    pub model_parameters: ::core::option::Option<::prost_types::Value>,
    /// Required. The Configuration specifying where output predictions should
    /// be written.
    /// The schema of any single prediction may be specified as a concatenation
    /// of \[Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model\]
    /// \[PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata\]
    /// \[instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\]
    /// and
    /// \[prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri\].
    #[prost(message, optional, tag = "6")]
    pub output_config: ::core::option::Option<batch_prediction_job::OutputConfig>,
    /// The config of resources used by the Model during the batch prediction. If
    /// the Model \[supports][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types\]
    /// DEDICATED_RESOURCES this config may be provided (and the job will use these
    /// resources), if the Model doesn't support AUTOMATIC_RESOURCES, this config
    /// must be provided.
    #[prost(message, optional, tag = "7")]
    pub dedicated_resources: ::core::option::Option<BatchDedicatedResources>,
    /// Immutable. Parameters configuring the batch behavior. Currently only applicable when
    /// \[dedicated_resources][google.cloud.aiplatform.v1.BatchPredictionJob.dedicated_resources\] are used (in other cases Vertex AI does
    /// the tuning itself).
    #[prost(message, optional, tag = "8")]
    pub manual_batch_tuning_parameters: ::core::option::Option<ManualBatchTuningParameters>,
    /// Generate explanation with the batch prediction results.
    ///
    /// When set to `true`, the batch prediction output changes based on the
    /// `predictions_format` field of the
    /// \[BatchPredictionJob.output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config\] object:
    ///
    /// * `bigquery`: output includes a column named `explanation`. The value
    /// is a struct that conforms to the \[Explanation][google.cloud.aiplatform.v1.Explanation\] object.
    /// * `jsonl`: The JSON objects on each line include an additional entry
    /// keyed `explanation`. The value of the entry is a JSON object that
    /// conforms to the \[Explanation][google.cloud.aiplatform.v1.Explanation\] object.
    /// * `csv`: Generating explanations for CSV format is not supported.
    ///
    /// If this field is set to true, either the \[Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec\] or
    /// \[explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec\] must be populated.
    #[prost(bool, tag = "23")]
    pub generate_explanation: bool,
    /// Explanation configuration for this BatchPredictionJob. Can be
    /// specified only if \[generate_explanation][google.cloud.aiplatform.v1.BatchPredictionJob.generate_explanation\] is set to `true`.
    ///
    /// This value overrides the value of \[Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec\]. All fields of
    /// \[explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec\] are optional in the request. If a field of the
    /// \[explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec\] object is not populated, the corresponding field of
    /// the \[Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec\] object is inherited.
    #[prost(message, optional, tag = "25")]
    pub explanation_spec: ::core::option::Option<ExplanationSpec>,
    /// Output only. Information further describing the output of this job.
    #[prost(message, optional, tag = "9")]
    pub output_info: ::core::option::Option<batch_prediction_job::OutputInfo>,
    /// Output only. The detailed state of the job.
    #[prost(enumeration = "JobState", tag = "10")]
    pub state: i32,
    /// Output only. Only populated when the job's state is JOB_STATE_FAILED or
    /// JOB_STATE_CANCELLED.
    #[prost(message, optional, tag = "11")]
    pub error: ::core::option::Option<super::super::super::rpc::Status>,
    /// Output only. Partial failures encountered.
    /// For example, single files that can't be read.
    /// This field never exceeds 20 entries.
    /// Status details fields contain standard GCP error details.
    #[prost(message, repeated, tag = "12")]
    pub partial_failures: ::prost::alloc::vec::Vec<super::super::super::rpc::Status>,
    /// Output only. Information about resources that had been consumed by this job.
    /// Provided in real time at best effort basis, as well as a final value
    /// once the job completes.
    ///
    /// Note: This field currently may be not populated for batch predictions that
    /// use AutoML Models.
    #[prost(message, optional, tag = "13")]
    pub resources_consumed: ::core::option::Option<ResourcesConsumed>,
    /// Output only. Statistics on completed and failed prediction instances.
    #[prost(message, optional, tag = "14")]
    pub completion_stats: ::core::option::Option<CompletionStats>,
    /// Output only. Time when the BatchPredictionJob was created.
    #[prost(message, optional, tag = "15")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the BatchPredictionJob for the first time entered the
    /// `JOB_STATE_RUNNING` state.
    #[prost(message, optional, tag = "16")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the BatchPredictionJob entered any of the following states:
    /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
    #[prost(message, optional, tag = "17")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the BatchPredictionJob was most recently updated.
    #[prost(message, optional, tag = "18")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The labels with user-defined metadata to organize BatchPredictionJobs.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    #[prost(map = "string, string", tag = "19")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Customer-managed encryption key options for a BatchPredictionJob. If this
    /// is set, then all resources created by the BatchPredictionJob will be
    /// encrypted with the provided encryption key.
    #[prost(message, optional, tag = "24")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
}
/// Nested message and enum types in `BatchPredictionJob`.
pub mod batch_prediction_job {
    /// Configures the input to \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\].
    /// See \[Model.supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats\] for Model's supported input
    /// formats, and how instances should be expressed via any of them.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct InputConfig {
        /// Required. The format in which instances are given, must be one of the
        /// \[Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model\]
        /// \[supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats\].
        #[prost(string, tag = "1")]
        pub instances_format: ::prost::alloc::string::String,
        /// Required. The source of the input.
        #[prost(oneof = "input_config::Source", tags = "2, 3")]
        pub source: ::core::option::Option<input_config::Source>,
    }
    /// Nested message and enum types in `InputConfig`.
    pub mod input_config {
        /// Required. The source of the input.
        #[derive(Clone, PartialEq, ::prost::Oneof)]
        pub enum Source {
            /// The Cloud Storage location for the input instances.
            #[prost(message, tag = "2")]
            GcsSource(super::super::GcsSource),
            /// The BigQuery location of the input table.
            /// The schema of the table should be in the format described by the given
            /// context OpenAPI Schema, if one is provided. The table may contain
            /// additional columns that are not described by the schema, and they will
            /// be ignored.
            #[prost(message, tag = "3")]
            BigquerySource(super::super::BigQuerySource),
        }
    }
    /// Configures the output of \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\].
    /// See \[Model.supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats\] for supported output
    /// formats, and how predictions are expressed via any of them.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct OutputConfig {
        /// Required. The format in which Vertex AI gives the predictions, must be one of the
        /// \[Model's][google.cloud.aiplatform.v1.BatchPredictionJob.model\]
        /// \[supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats\].
        #[prost(string, tag = "1")]
        pub predictions_format: ::prost::alloc::string::String,
        /// Required. The destination of the output.
        #[prost(oneof = "output_config::Destination", tags = "2, 3")]
        pub destination: ::core::option::Option<output_config::Destination>,
    }
    /// Nested message and enum types in `OutputConfig`.
    pub mod output_config {
        /// Required. The destination of the output.
        #[derive(Clone, PartialEq, ::prost::Oneof)]
        pub enum Destination {
            /// The Cloud Storage location of the directory where the output is
            /// to be written to. In the given directory a new directory is created.
            /// Its name is `prediction-<model-display-name>-<job-create-time>`,
            /// where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
            /// Inside of it files `predictions_0001.<extension>`,
            /// `predictions_0002.<extension>`, ..., `predictions_N.<extension>`
            /// are created where `<extension>` depends on chosen
            /// \[predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format\], and N may equal 0001 and depends on the total
            /// number of successfully predicted instances.
            /// If the Model has both \[instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\]
            /// and \[prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri\] schemata
            /// defined then each such file contains predictions as per the
            /// \[predictions_format][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.predictions_format\].
            /// If prediction for any instance failed (partially or completely), then
            /// an additional `errors_0001.<extension>`, `errors_0002.<extension>`,...,
            /// `errors_N.<extension>` files are created (N depends on total number
            /// of failed predictions). These files contain the failed instances,
            /// as per their schema, followed by an additional `error` field which as
            /// its value has \[google.rpc.Status][google.rpc.Status\]
            /// containing only `code` and `message` fields.
            #[prost(message, tag = "2")]
            GcsDestination(super::super::GcsDestination),
            /// The BigQuery project or dataset location where the output is to be
            /// written to. If project is provided, a new dataset is created with name
            /// `prediction_<model-display-name>_<job-create-time>`
            /// where <model-display-name> is made
            /// BigQuery-dataset-name compatible (for example, most special characters
            /// become underscores), and timestamp is in
            /// YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
            /// two tables will be created, `predictions`, and `errors`.
            /// If the Model has both \[instance][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\]
            /// and \[prediction][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri\] schemata
            /// defined then the tables have columns as follows: The `predictions`
            /// table contains instances for which the prediction succeeded, it
            /// has columns as per a concatenation of the Model's instance and
            /// prediction schemata. The `errors` table contains rows for which the
            /// prediction has failed, it has instance columns, as per the
            /// instance schema, followed by a single "errors" column, which as its
            /// values has \[google.rpc.Status][google.rpc.Status\]
            /// represented as a STRUCT, and containing only `code` and `message`.
            #[prost(message, tag = "3")]
            BigqueryDestination(super::super::BigQueryDestination),
        }
    }
    /// Further describes this job's output.
    /// Supplements \[output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config\].
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct OutputInfo {
        /// Output only. The name of the BigQuery table created, in
        /// `predictions_<timestamp>`
        /// format, into which the prediction output is written.
        /// Can be used by UI to generate the BigQuery output path, for example.
        #[prost(string, tag = "4")]
        pub bigquery_output_table: ::prost::alloc::string::String,
        /// The output location into which prediction output is written.
        #[prost(oneof = "output_info::OutputLocation", tags = "1, 2")]
        pub output_location: ::core::option::Option<output_info::OutputLocation>,
    }
    /// Nested message and enum types in `OutputInfo`.
    pub mod output_info {
        /// The output location into which prediction output is written.
        #[derive(Clone, PartialEq, ::prost::Oneof)]
        pub enum OutputLocation {
            /// Output only. The full path of the Cloud Storage directory created, into which
            /// the prediction output is written.
            #[prost(string, tag = "1")]
            GcsOutputDirectory(::prost::alloc::string::String),
            /// Output only. The path of the BigQuery dataset created, in
            /// `bq://projectId.bqDatasetId`
            /// format, into which the prediction output is written.
            #[prost(string, tag = "2")]
            BigqueryOutputDataset(::prost::alloc::string::String),
        }
    }
}
/// Instance of a general context.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Context {
    /// Output only. The resource name of the Context.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// User provided display name of the Context.
    /// May be up to 128 Unicode characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// An eTag used to perform consistent read-modify-write updates. If not set, a
    /// blind "overwrite" update happens.
    #[prost(string, tag = "8")]
    pub etag: ::prost::alloc::string::String,
    /// The labels with user-defined metadata to organize your Contexts.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one Context (System
    /// labels are excluded).
    #[prost(map = "string, string", tag = "9")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Output only. Timestamp when this Context was created.
    #[prost(message, optional, tag = "10")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Context was last updated.
    #[prost(message, optional, tag = "11")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. A list of resource names of Contexts that are parents of this Context.
    /// A Context may have at most 10 parent_contexts.
    #[prost(string, repeated, tag = "12")]
    pub parent_contexts: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The title of the schema describing the metadata.
    ///
    /// Schema title and version are expected to be registered in earlier Create
    /// Schema calls. And both are used together as unique identifiers to identify
    /// schemas within the local metadata store.
    #[prost(string, tag = "13")]
    pub schema_title: ::prost::alloc::string::String,
    /// The version of the schema in schema_name to use.
    ///
    /// Schema title and version are expected to be registered in earlier Create
    /// Schema calls. And both are used together as unique identifiers to identify
    /// schemas within the local metadata store.
    #[prost(string, tag = "14")]
    pub schema_version: ::prost::alloc::string::String,
    /// Properties of the Context.
    /// The size of this field should not exceed 200KB.
    #[prost(message, optional, tag = "15")]
    pub metadata: ::core::option::Option<::prost_types::Struct>,
    /// Description of the Context.
    #[prost(string, tag = "16")]
    pub description: ::prost::alloc::string::String,
}
/// Represents an environment variable present in a Container or Python Module.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EnvVar {
    /// Required. Name of the environment variable. Must be a valid C identifier.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. Variables that reference a $(VAR_NAME) are expanded
    /// using the previous defined environment variables in the container and
    /// any service environment variables. If a variable cannot be resolved,
    /// the reference in the input string will be unchanged. The $(VAR_NAME)
    /// syntax can be escaped with a double $$, i.e., $$(VAR_NAME). Escaped
    /// references will never be expanded, regardless of whether the variable
    /// exists or not.
    #[prost(string, tag = "2")]
    pub value: ::prost::alloc::string::String,
}
/// Represents a job that runs custom workloads such as a Docker container or a
/// Python package. A CustomJob can have multiple worker pools and each worker
/// pool can have its own machine and input spec. A CustomJob will be cleaned up
/// once the job enters terminal state (failed or succeeded).
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CustomJob {
    /// Output only. Resource name of a CustomJob.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The display name of the CustomJob.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Required. Job spec.
    #[prost(message, optional, tag = "4")]
    pub job_spec: ::core::option::Option<CustomJobSpec>,
    /// Output only. The detailed state of the job.
    // Kept as the raw i32 of `JobState` so values unknown to this client
    // version survive decoding (standard prost enum handling).
    #[prost(enumeration = "JobState", tag = "5")]
    pub state: i32,
    /// Output only. Time when the CustomJob was created.
    #[prost(message, optional, tag = "6")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the CustomJob for the first time entered the
    /// `JOB_STATE_RUNNING` state.
    #[prost(message, optional, tag = "7")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the CustomJob entered any of the following states:
    /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
    #[prost(message, optional, tag = "8")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the CustomJob was most recently updated.
    #[prost(message, optional, tag = "9")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Only populated when job's state is `JOB_STATE_FAILED` or
    /// `JOB_STATE_CANCELLED`.
    #[prost(message, optional, tag = "10")]
    pub error: ::core::option::Option<super::super::super::rpc::Status>,
    /// The labels with user-defined metadata to organize CustomJobs.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    #[prost(map = "string, string", tag = "11")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Customer-managed encryption key options for a CustomJob. If this is set,
    /// then all resources created by the CustomJob will be encrypted with the
    /// provided encryption key.
    #[prost(message, optional, tag = "12")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
    /// Output only. URIs for accessing [interactive
    /// shells](<https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell>)
    /// (one URI for each training node). Only available if
    /// \[job_spec.enable_web_access][google.cloud.aiplatform.v1.CustomJobSpec.enable_web_access\] is `true`.
    ///
    /// The keys are names of each node in the training job; for example,
    /// `workerpool0-0` for the primary node, `workerpool1-0` for the first node in
    /// the second worker pool, and `workerpool1-1` for the second node in the
    /// second worker pool.
    ///
    /// The values are the URIs for each node's interactive shell.
    #[prost(map = "string, string", tag = "16")]
    pub web_access_uris:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
}
/// Represents the spec of a CustomJob.
// NOTE: prost `Message` type; gaps in `tag` numbering (e.g. no tag 2) mirror
// the .proto definition and must be preserved.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CustomJobSpec {
    /// Required. The spec of the worker pools including machine type and Docker image.
    /// All worker pools except the first one are optional and can be skipped by
    /// providing an empty value.
    #[prost(message, repeated, tag = "1")]
    pub worker_pool_specs: ::prost::alloc::vec::Vec<WorkerPoolSpec>,
    /// Scheduling options for a CustomJob.
    #[prost(message, optional, tag = "3")]
    pub scheduling: ::core::option::Option<Scheduling>,
    /// Specifies the service account for workload run-as account.
    /// Users submitting jobs must have act-as permission on this run-as account.
    /// If unspecified, the [Vertex AI Custom Code Service
    /// Agent](<https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents>)
    /// for the CustomJob's project is used.
    #[prost(string, tag = "4")]
    pub service_account: ::prost::alloc::string::String,
    /// The full name of the Compute Engine
    /// \[network\](/compute/docs/networks-and-firewalls#networks) to which the Job
    /// should be peered. For example, `projects/12345/global/networks/myVPC`.
    /// \[Format\](/compute/docs/reference/rest/v1/networks/insert)
    /// is of the form `projects/{project}/global/networks/{network}`.
    /// Where {project} is a project number, as in `12345`, and {network} is a
    /// network name.
    ///
    /// Private services access must already be configured for the network. If left
    /// unspecified, the job is not peered with any network.
    #[prost(string, tag = "5")]
    pub network: ::prost::alloc::string::String,
    /// The Cloud Storage location to store the output of this CustomJob or
    /// HyperparameterTuningJob. For HyperparameterTuningJob,
    /// the baseOutputDirectory of
    /// each child CustomJob backing a Trial is set to a subdirectory of name
    /// \[id][google.cloud.aiplatform.v1.Trial.id\] under its parent HyperparameterTuningJob's
    /// baseOutputDirectory.
    ///
    /// The following Vertex AI environment variables will be passed to
    /// containers or python modules when this field is set:
    ///
    /// For CustomJob:
    ///
    /// * AIP_MODEL_DIR = `<base_output_directory>/model/`
    /// * AIP_CHECKPOINT_DIR = `<base_output_directory>/checkpoints/`
    /// * AIP_TENSORBOARD_LOG_DIR = `<base_output_directory>/logs/`
    ///
    /// For CustomJob backing a Trial of HyperparameterTuningJob:
    ///
    /// * AIP_MODEL_DIR = `<base_output_directory>/<trial_id>/model/`
    /// * AIP_CHECKPOINT_DIR = `<base_output_directory>/<trial_id>/checkpoints/`
    /// * AIP_TENSORBOARD_LOG_DIR = `<base_output_directory>/<trial_id>/logs/`
    #[prost(message, optional, tag = "6")]
    pub base_output_directory: ::core::option::Option<GcsDestination>,
    /// Optional. The name of a Vertex AI \[Tensorboard][google.cloud.aiplatform.v1.Tensorboard\] resource to which this CustomJob
    /// will upload Tensorboard logs.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`
    #[prost(string, tag = "7")]
    pub tensorboard: ::prost::alloc::string::String,
    /// Optional. Whether you want Vertex AI to enable [interactive shell
    /// access](<https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell>)
    /// to training containers.
    ///
    /// If set to `true`, you can access interactive shells at the URIs given
    /// by \[CustomJob.web_access_uris][google.cloud.aiplatform.v1.CustomJob.web_access_uris\] or \[Trial.web_access_uris][google.cloud.aiplatform.v1.Trial.web_access_uris\] (within
    /// \[HyperparameterTuningJob.trials][google.cloud.aiplatform.v1.HyperparameterTuningJob.trials\]).
    #[prost(bool, tag = "10")]
    pub enable_web_access: bool,
}
/// Represents the spec of a worker pool in a job.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct WorkerPoolSpec {
    /// Optional. Immutable. The specification of a single machine.
    #[prost(message, optional, tag = "1")]
    pub machine_spec: ::core::option::Option<MachineSpec>,
    /// Optional. The number of worker replicas to use for this worker pool.
    #[prost(int64, tag = "2")]
    pub replica_count: i64,
    /// Disk spec.
    #[prost(message, optional, tag = "5")]
    pub disk_spec: ::core::option::Option<DiskSpec>,
    /// The custom task to be executed in this worker pool.
    // Protobuf oneof (tags 6 and 7); `None` when neither variant was set.
    #[prost(oneof = "worker_pool_spec::Task", tags = "6, 7")]
    pub task: ::core::option::Option<worker_pool_spec::Task>,
}
/// Nested message and enum types in `WorkerPoolSpec`.
pub mod worker_pool_spec {
    /// The custom task to be executed in this worker pool.
    // Protobuf oneof: at most one of these variants is present on the wire.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Task {
        /// The custom container task.
        #[prost(message, tag = "6")]
        ContainerSpec(super::ContainerSpec),
        /// The Python packaged task.
        #[prost(message, tag = "7")]
        PythonPackageSpec(super::PythonPackageSpec),
    }
}
/// The spec of a Container.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ContainerSpec {
    /// Required. The URI of a container image in the Container Registry that is to be run on
    /// each worker replica.
    #[prost(string, tag = "1")]
    pub image_uri: ::prost::alloc::string::String,
    /// The command to be invoked when the container is started.
    /// It overrides the entrypoint instruction in Dockerfile when provided.
    #[prost(string, repeated, tag = "2")]
    pub command: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The arguments to be passed when starting the container.
    #[prost(string, repeated, tag = "3")]
    pub args: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Environment variables to be passed to the container.
    /// Maximum limit is 100.
    #[prost(message, repeated, tag = "4")]
    pub env: ::prost::alloc::vec::Vec<EnvVar>,
}
/// The spec of a Python packaged code.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PythonPackageSpec {
    /// Required. The URI of a container image in Artifact Registry that will run the
    /// provided Python package. Vertex AI provides a wide range of executor
    /// images with pre-installed packages to meet users' various use cases. See
    /// the list of [pre-built containers for
    /// training](<https://cloud.google.com/vertex-ai/docs/training/pre-built-containers>).
    /// You must use an image from this list.
    #[prost(string, tag = "1")]
    pub executor_image_uri: ::prost::alloc::string::String,
    /// Required. The Google Cloud Storage location of the Python package files which are
    /// the training program and its dependent packages.
    /// The maximum number of package URIs is 100.
    #[prost(string, repeated, tag = "2")]
    pub package_uris: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Required. The Python module name to run after installing the packages.
    #[prost(string, tag = "3")]
    pub python_module: ::prost::alloc::string::String,
    /// Command line arguments to be passed to the Python task.
    #[prost(string, repeated, tag = "4")]
    pub args: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Environment variables to be passed to the python module.
    /// Maximum limit is 100.
    #[prost(message, repeated, tag = "5")]
    pub env: ::prost::alloc::vec::Vec<EnvVar>,
}
/// All parameters related to queuing and scheduling of custom jobs.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Scheduling {
    /// The maximum job running time. The default is 7 days.
    // `None` means the field was not set; the server applies the default.
    #[prost(message, optional, tag = "1")]
    pub timeout: ::core::option::Option<::prost_types::Duration>,
    /// Restarts the entire CustomJob if a worker gets restarted.
    /// This feature can be used by distributed training jobs that are not
    /// resilient to workers leaving and joining a job.
    #[prost(bool, tag = "3")]
    pub restart_job_on_worker_restart: bool,
}
/// A piece of data in a Dataset. Could be an image, a video, a document or plain
/// text.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DataItem {
    /// Output only. The resource name of the DataItem.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Output only. Timestamp when this DataItem was created.
    #[prost(message, optional, tag = "2")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this DataItem was last updated.
    #[prost(message, optional, tag = "6")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Optional. The labels with user-defined metadata to organize your DataItems.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one DataItem(System
    /// labels are excluded).
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable.
    #[prost(map = "string, string", tag = "3")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Required. The data that the DataItem represents (for example, an image or a text
    /// snippet). The schema of the payload is stored in the parent Dataset's
    /// [metadata schema's]\[google.cloud.aiplatform.v1.Dataset.metadata_schema_uri\] dataItemSchemaUri field.
    #[prost(message, optional, tag = "4")]
    pub payload: ::core::option::Option<::prost_types::Value>,
    /// Optional. Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "7")]
    pub etag: ::prost::alloc::string::String,
}
/// SpecialistPool represents customers' own workforce to work on their data
/// labeling jobs. It includes a group of specialist managers and workers.
/// Managers are responsible for managing the workers in this pool as well as
/// customers' data labeling jobs associated with this pool. Customers create
/// specialist pool as well as start data labeling jobs on Cloud, managers and
/// workers handle the jobs using CrowdCompute console.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SpecialistPool {
    /// Required. The resource name of the SpecialistPool.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The user-defined name of the SpecialistPool.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    /// This field should be unique on project-level.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Output only. The number of managers in this SpecialistPool.
    #[prost(int32, tag = "3")]
    pub specialist_managers_count: i32,
    /// The email addresses of the managers in the SpecialistPool.
    #[prost(string, repeated, tag = "4")]
    pub specialist_manager_emails: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Output only. The resource name of the pending data labeling jobs.
    #[prost(string, repeated, tag = "5")]
    pub pending_data_labeling_jobs: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The email addresses of workers in the SpecialistPool.
    #[prost(string, repeated, tag = "7")]
    pub specialist_worker_emails: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// DataLabelingJob is used to trigger a human labeling job on unlabeled data
/// from the following Dataset:
// NOTE: prost `Message` type; `tag` values are protobuf field numbers and are
// intentionally non-sequential (they mirror the .proto definition).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DataLabelingJob {
    /// Output only. Resource name of the DataLabelingJob.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The user-defined name of the DataLabelingJob.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    /// Display name of a DataLabelingJob.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Required. Dataset resource names. Right now we only support labeling from a single
    /// Dataset.
    /// Format:
    /// `projects/{project}/locations/{location}/datasets/{dataset}`
    #[prost(string, repeated, tag = "3")]
    pub datasets: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Labels to assign to annotations generated by this DataLabelingJob.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable.
    #[prost(map = "string, string", tag = "12")]
    pub annotation_labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Required. Number of labelers to work on each DataItem.
    #[prost(int32, tag = "4")]
    pub labeler_count: i32,
    /// Required. The Google Cloud Storage location of the instruction pdf. This pdf is
    /// shared with labelers, and provides detailed description on how to label
    /// DataItems in Datasets.
    #[prost(string, tag = "5")]
    pub instruction_uri: ::prost::alloc::string::String,
    /// Required. Points to a YAML file stored on Google Cloud Storage describing the
    /// config for a specific type of DataLabelingJob.
    /// The schema files that can be used here are found in the
    /// <https://storage.googleapis.com/google-cloud-aiplatform> bucket in the
    /// /schema/datalabelingjob/inputs/ folder.
    #[prost(string, tag = "6")]
    pub inputs_schema_uri: ::prost::alloc::string::String,
    /// Required. Input config parameters for the DataLabelingJob.
    #[prost(message, optional, tag = "7")]
    pub inputs: ::core::option::Option<::prost_types::Value>,
    /// Output only. The detailed state of the job.
    // Raw i32 of `JobState`; prost keeps enums as i32 so unknown values
    // survive decoding.
    #[prost(enumeration = "JobState", tag = "8")]
    pub state: i32,
    /// Output only. Current labeling job progress percentage scaled in interval [0, 100],
    /// indicating the percentage of DataItems that has been finished.
    #[prost(int32, tag = "13")]
    pub labeling_progress: i32,
    /// Output only. Estimated cost(in US dollars) that the DataLabelingJob has incurred to
    /// date.
    #[prost(message, optional, tag = "14")]
    pub current_spend: ::core::option::Option<super::super::super::r#type::Money>,
    /// Output only. Timestamp when this DataLabelingJob was created.
    #[prost(message, optional, tag = "9")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this DataLabelingJob was updated most recently.
    #[prost(message, optional, tag = "10")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. DataLabelingJob errors. It is only populated when job's state is
    /// `JOB_STATE_FAILED` or `JOB_STATE_CANCELLED`.
    #[prost(message, optional, tag = "22")]
    pub error: ::core::option::Option<super::super::super::rpc::Status>,
    /// The labels with user-defined metadata to organize your DataLabelingJobs.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable. Following system labels exist for each DataLabelingJob:
    ///
    /// * "aiplatform.googleapis.com/schema": output only, its value is the
    ///   \[inputs_schema][google.cloud.aiplatform.v1.DataLabelingJob.inputs_schema_uri\]'s title.
    #[prost(map = "string, string", tag = "11")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// The SpecialistPools' resource names associated with this job.
    #[prost(string, repeated, tag = "16")]
    pub specialist_pools: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Customer-managed encryption key spec for a DataLabelingJob. If set, this
    /// DataLabelingJob will be secured by this key.
    ///
    /// Note: Annotations created in the DataLabelingJob are associated with
    /// the EncryptionSpec of the Dataset they are exported to.
    #[prost(message, optional, tag = "20")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
    /// Parameters that configure the active learning pipeline. Active learning
    /// will label the data incrementally via several iterations. For every
    /// iteration, it will select a batch of data based on the sampling strategy.
    #[prost(message, optional, tag = "21")]
    pub active_learning_config: ::core::option::Option<ActiveLearningConfig>,
}
/// Parameters that configure the active learning pipeline. Active learning will
/// label the data incrementally by several iterations. For every iteration, it
/// will select a batch of data based on the sampling strategy.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ActiveLearningConfig {
    /// Active learning data sampling config. For every active learning labeling
    /// iteration, it will select a batch of data based on the sampling strategy.
    #[prost(message, optional, tag = "3")]
    pub sample_config: ::core::option::Option<SampleConfig>,
    /// CMLE training config. For every active learning labeling iteration, system
    /// will train a machine learning model on CMLE. The trained model will be used
    /// by data sampling algorithm to select DataItems.
    #[prost(message, optional, tag = "4")]
    pub training_config: ::core::option::Option<TrainingConfig>,
    /// Required. Max human labeling DataItems. The rest part will be labeled by
    /// machine.
    // Protobuf oneof (tags 1 and 2); `None` when neither variant was set.
    #[prost(oneof = "active_learning_config::HumanLabelingBudget", tags = "1, 2")]
    pub human_labeling_budget: ::core::option::Option<active_learning_config::HumanLabelingBudget>,
}
/// Nested message and enum types in `ActiveLearningConfig`.
pub mod active_learning_config {
    /// Required. Max human labeling DataItems. The rest part will be labeled by
    /// machine.
    // Protobuf oneof: at most one of these variants is present on the wire.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum HumanLabelingBudget {
        /// Max number of human labeled DataItems.
        #[prost(int64, tag = "1")]
        MaxDataItemCount(i64),
        /// Max percent of total DataItems for human labeling.
        #[prost(int32, tag = "2")]
        MaxDataItemPercentage(i32),
    }
}
/// Active learning data sampling config. For every active learning labeling
/// iteration, it will select a batch of data based on the sampling strategy.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SampleConfig {
    /// Field to choose sampling strategy. Sampling strategy will decide which data
    /// should be selected for human labeling in every batch.
    // Raw i32 of `sample_config::SampleStrategy`; unknown values survive decoding.
    #[prost(enumeration = "sample_config::SampleStrategy", tag = "5")]
    pub sample_strategy: i32,
    /// Decides sample size for the initial batch. initial_batch_sample_percentage
    /// is used by default.
    #[prost(oneof = "sample_config::InitialBatchSampleSize", tags = "1")]
    pub initial_batch_sample_size: ::core::option::Option<sample_config::InitialBatchSampleSize>,
    /// Decides sample size for the following batches.
    /// following_batch_sample_percentage is used by default.
    #[prost(oneof = "sample_config::FollowingBatchSampleSize", tags = "3")]
    pub following_batch_sample_size:
        ::core::option::Option<sample_config::FollowingBatchSampleSize>,
}
/// Nested message and enum types in `SampleConfig`.
pub mod sample_config {
    /// Sample strategy decides which subset of DataItems should be selected for
    /// human labeling in every batch.
    // Protobuf enum: `#[repr(i32)]` so discriminants match the wire values.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum SampleStrategy {
        /// Default will be treated as UNCERTAINTY.
        Unspecified = 0,
        /// Sample the most uncertain data to label.
        Uncertainty = 1,
    }
    /// Decides sample size for the initial batch. initial_batch_sample_percentage
    /// is used by default.
    // Single-variant protobuf oneof.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum InitialBatchSampleSize {
        /// The percentage of data needed to be labeled in the first batch.
        #[prost(int32, tag = "1")]
        InitialBatchSamplePercentage(i32),
    }
    /// Decides sample size for the following batches.
    /// following_batch_sample_percentage is used by default.
    // Single-variant protobuf oneof.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum FollowingBatchSampleSize {
        /// The percentage of data needed to be labeled in each following batch
        /// (except the first batch).
        #[prost(int32, tag = "3")]
        FollowingBatchSamplePercentage(i32),
    }
}
/// CMLE training config. For every active learning labeling iteration, system
/// will train a machine learning model on CMLE. The trained model will be used
/// by data sampling algorithm to select DataItems.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TrainingConfig {
    /// The timeout hours for the CMLE training job, expressed in milli hours
    /// i.e. 1,000 value in this field means 1 hour.
    #[prost(int64, tag = "1")]
    pub timeout_training_milli_hours: i64,
}
/// A collection of DataItems and Annotations on them.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Dataset {
    /// Output only. The resource name of the Dataset.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The user-defined name of the Dataset.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Optional. The description of the Dataset.
    #[prost(string, tag = "16")]
    pub description: ::prost::alloc::string::String,
    /// Required. Points to a YAML file stored on Google Cloud Storage describing additional
    /// information about the Dataset.
    /// The schema is defined as an OpenAPI 3.0.2 Schema Object.
    /// The schema files that can be used here are found in
    /// gs://google-cloud-aiplatform/schema/dataset/metadata/.
    #[prost(string, tag = "3")]
    pub metadata_schema_uri: ::prost::alloc::string::String,
    /// Required. Additional information about the Dataset.
    #[prost(message, optional, tag = "8")]
    pub metadata: ::core::option::Option<::prost_types::Value>,
    /// Output only. Timestamp when this Dataset was created.
    #[prost(message, optional, tag = "4")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Dataset was last updated.
    #[prost(message, optional, tag = "5")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "6")]
    pub etag: ::prost::alloc::string::String,
    /// The labels with user-defined metadata to organize your Datasets.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one Dataset (System
    /// labels are excluded).
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable. Following system labels exist for each Dataset:
    ///
    /// * "aiplatform.googleapis.com/dataset_metadata_schema": output only, its
    ///   value is the \[metadata_schema's][google.cloud.aiplatform.v1.Dataset.metadata_schema_uri\] title.
    #[prost(map = "string, string", tag = "7")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Customer-managed encryption key spec for a Dataset. If set, this Dataset
    /// and all sub-resources of this Dataset will be secured by this key.
    #[prost(message, optional, tag = "11")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
}
/// Describes the location from where we import data into a Dataset, together
/// with the labels that will be applied to the DataItems and the Annotations.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportDataConfig {
    /// Labels that will be applied to newly imported DataItems. If an identical
    /// DataItem as one being imported already exists in the Dataset, then these
    /// labels will be appended to these of the already existing one, and if labels
    /// with identical key is imported before, the old label value will be
    /// overwritten. If two DataItems are identical in the same import data
    /// operation, the labels will be combined and if key collision happens in this
    /// case, one of the values will be picked randomly. Two DataItems are
    /// considered identical if their content bytes are identical (e.g. image bytes
    /// or pdf bytes).
    /// These labels will be overridden by Annotation labels specified inside index
    /// file referenced by \[import_schema_uri][google.cloud.aiplatform.v1.ImportDataConfig.import_schema_uri\], e.g. jsonl file.
    #[prost(map = "string, string", tag = "2")]
    pub data_item_labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Required. Points to a YAML file stored on Google Cloud Storage describing the import
    /// format. Validation will be done against the schema. The schema is defined
    /// as an [OpenAPI 3.0.2 Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    #[prost(string, tag = "4")]
    pub import_schema_uri: ::prost::alloc::string::String,
    /// The source of the input.
    // Protobuf oneof (single variant); `None` when unset.
    #[prost(oneof = "import_data_config::Source", tags = "1")]
    pub source: ::core::option::Option<import_data_config::Source>,
}
/// Nested message and enum types in `ImportDataConfig`.
pub mod import_data_config {
    /// The source of the input.
    // Protobuf oneof: currently only a GCS source is defined.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Source {
        /// The Google Cloud Storage location for the input content.
        #[prost(message, tag = "1")]
        GcsSource(super::GcsSource),
    }
}
/// Describes what part of the Dataset is to be exported, the destination of
/// the export and how to export.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportDataConfig {
    /// A filter on Annotations of the Dataset. Only Annotations on to-be-exported
    /// DataItems(specified by \[data_items_filter][\]) that match this filter will
    /// be exported. The filter syntax is the same as in
    /// \[ListAnnotations][google.cloud.aiplatform.v1.DatasetService.ListAnnotations\].
    #[prost(string, tag = "2")]
    pub annotations_filter: ::prost::alloc::string::String,
    /// The destination of the output.
    // Protobuf oneof (single variant); `None` when unset.
    #[prost(oneof = "export_data_config::Destination", tags = "1")]
    pub destination: ::core::option::Option<export_data_config::Destination>,
}
/// Nested message and enum types in `ExportDataConfig`.
pub mod export_data_config {
    /// The destination of the output.
    // Protobuf oneof: currently only a GCS destination is defined.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Destination {
        /// The Google Cloud Storage location where the output is to be written to.
        /// In the given directory a new directory will be created with name:
        /// `export-data-<dataset-display-name>-<timestamp-of-export-call>` where
        /// timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All export
        /// output will be written into that directory. Inside that directory,
        /// annotations with the same schema will be grouped into sub directories
        /// which are named with the corresponding annotations' schema title. Inside
        /// these sub directories, a schema.yaml will be created to describe the
        /// output format.
        #[prost(message, tag = "1")]
        GcsDestination(super::GcsDestination),
    }
}
/// Generic Metadata shared by all operations.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GenericOperationMetadata {
    /// Output only. Partial failures encountered.
    /// E.g. single files that couldn't be read.
    /// This field should never exceed 20 entries.
    /// Status details field will contain standard GCP error details.
    #[prost(message, repeated, tag = "1")]
    pub partial_failures: ::prost::alloc::vec::Vec<super::super::super::rpc::Status>,
    /// Output only. Time when the operation was created.
    #[prost(message, optional, tag = "2")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the operation was updated for the last time.
    /// If the operation has finished (successfully or not), this is the finish
    /// time.
    #[prost(message, optional, tag = "3")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
}
/// Details of operations that perform deletes of any entities.
// NOTE: prost `Message` type; wraps only the shared operation metadata.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteOperationMetadata {
    /// The common part of the operation metadata.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Points to a DeployedModel.
// NOTE: prost `Message` type; `tag` values are protobuf field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployedModelRef {
    /// Immutable. A resource name of an Endpoint.
    #[prost(string, tag = "1")]
    pub endpoint: ::prost::alloc::string::String,
    /// Immutable. An ID of a DeployedModel in the above Endpoint.
    #[prost(string, tag = "2")]
    pub deployed_model_id: ::prost::alloc::string::String,
}
/// A trained machine learning Model.
// Protobuf message (prost). The `tag` numbers below are the wire-format field
// numbers from the upstream .proto; they are intentionally sparse and
// non-sequential (e.g. 20, 26) and must not be renumbered.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Model {
    /// The resource name of the Model.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The display name of the Model.
    /// The name can be up to 128 characters long and can consist of any UTF-8
    /// characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// The description of the Model.
    #[prost(string, tag = "3")]
    pub description: ::prost::alloc::string::String,
    /// The schemata that describe formats of the Model's predictions and
    /// explanations as given and returned via
    /// \[PredictionService.Predict][google.cloud.aiplatform.v1.PredictionService.Predict\] and \[PredictionService.Explain][google.cloud.aiplatform.v1.PredictionService.Explain\].
    #[prost(message, optional, tag = "4")]
    pub predict_schemata: ::core::option::Option<PredictSchemata>,
    /// Immutable. Points to a YAML file stored on Google Cloud Storage describing additional
    /// information about the Model, that is specific to it. Unset if the Model
    /// does not have any additional information.
    /// The schema is defined as an OpenAPI 3.0.2 [Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    /// AutoML Models always have this field populated by Vertex AI, if no
    /// additional metadata is needed, this field is set to an empty string.
    /// Note: The URI given on output will be immutable and probably different,
    /// including the URI scheme, than the one given on input. The output URI will
    /// point to a location where the user only has a read access.
    #[prost(string, tag = "5")]
    pub metadata_schema_uri: ::prost::alloc::string::String,
    /// Immutable. An additional information about the Model; the schema of the metadata can
    /// be found in \[metadata_schema][google.cloud.aiplatform.v1.Model.metadata_schema_uri\].
    /// Unset if the Model does not have any additional information.
    #[prost(message, optional, tag = "6")]
    pub metadata: ::core::option::Option<::prost_types::Value>,
    /// Output only. The formats in which this Model may be exported. If empty, this Model is
    /// not available for export.
    #[prost(message, repeated, tag = "20")]
    pub supported_export_formats: ::prost::alloc::vec::Vec<model::ExportFormat>,
    /// Output only. The resource name of the TrainingPipeline that uploaded this Model, if any.
    #[prost(string, tag = "7")]
    pub training_pipeline: ::prost::alloc::string::String,
    /// Input only. The specification of the container that is to be used when deploying
    /// this Model. The specification is ingested upon
    /// \[ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel\], and all binaries it contains are copied
    /// and stored internally by Vertex AI.
    /// Not present for AutoML Models.
    #[prost(message, optional, tag = "9")]
    pub container_spec: ::core::option::Option<ModelContainerSpec>,
    /// Immutable. The path to the directory containing the Model artifact and any of its
    /// supporting files.
    /// Not present for AutoML Models.
    #[prost(string, tag = "26")]
    pub artifact_uri: ::prost::alloc::string::String,
    /// Output only. When this Model is deployed, its prediction resources are described by the
    /// `prediction_resources` field of the \[Endpoint.deployed_models][google.cloud.aiplatform.v1.Endpoint.deployed_models\] object.
    /// Because not all Models support all resource configuration types, the
    /// configuration types this Model supports are listed here. If no
    /// configuration types are listed, the Model cannot be deployed to an
    /// \[Endpoint][google.cloud.aiplatform.v1.Endpoint\] and does not support
    /// online predictions (\[PredictionService.Predict][google.cloud.aiplatform.v1.PredictionService.Predict\] or
    /// \[PredictionService.Explain][google.cloud.aiplatform.v1.PredictionService.Explain\]). Such a Model can serve predictions by
    /// using a \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\], if it has at least one entry each in
    /// \[supported_input_storage_formats][google.cloud.aiplatform.v1.Model.supported_input_storage_formats\] and
    /// \[supported_output_storage_formats][google.cloud.aiplatform.v1.Model.supported_output_storage_formats\].
    // Stored as raw `i32` wire values; decode through
    // `model::DeploymentResourcesType` (prost enumeration fields keep unknown
    // values representable).
    #[prost(
        enumeration = "model::DeploymentResourcesType",
        repeated,
        packed = "false",
        tag = "10"
    )]
    pub supported_deployment_resources_types: ::prost::alloc::vec::Vec<i32>,
    /// Output only. The formats this Model supports in
    /// \[BatchPredictionJob.input_config][google.cloud.aiplatform.v1.BatchPredictionJob.input_config\]. If
    /// \[PredictSchemata.instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\] exists, the instances
    /// should be given as per that schema.
    ///
    /// The possible formats are:
    ///
    /// * `jsonl`
    /// The JSON Lines format, where each instance is a single line. Uses
    /// \[GcsSource][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.gcs_source\].
    ///
    /// * `csv`
    /// The CSV format, where each instance is a single comma-separated line.
    /// The first line in the file is the header, containing comma-separated field
    /// names. Uses \[GcsSource][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.gcs_source\].
    ///
    /// * `tf-record`
    /// The TFRecord format, where each instance is a single record in tfrecord
    /// syntax. Uses \[GcsSource][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.gcs_source\].
    ///
    /// * `tf-record-gzip`
    /// Similar to `tf-record`, but the file is gzipped. Uses
    /// \[GcsSource][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.gcs_source\].
    ///
    /// * `bigquery`
    /// Each instance is a single row in BigQuery. Uses
    /// \[BigQuerySource][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig.bigquery_source\].
    ///
    /// * `file-list`
    /// Each line of the file is the location of an instance to process, uses
    /// `gcs_source` field of the
    /// \[InputConfig][google.cloud.aiplatform.v1.BatchPredictionJob.InputConfig\] object.
    ///
    ///
    /// If this Model doesn't support any of these formats it means it cannot be
    /// used with a \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\]. However, if it has
    /// \[supported_deployment_resources_types][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types\], it could serve online
    /// predictions by using \[PredictionService.Predict][google.cloud.aiplatform.v1.PredictionService.Predict\] or
    /// \[PredictionService.Explain][google.cloud.aiplatform.v1.PredictionService.Explain\].
    #[prost(string, repeated, tag = "11")]
    pub supported_input_storage_formats: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Output only. The formats this Model supports in
    /// \[BatchPredictionJob.output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config\]. If both
    /// \[PredictSchemata.instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\] and
    /// \[PredictSchemata.prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri\] exist, the predictions
    /// are returned together with their instances. In other words, the
    /// prediction has the original instance data first, followed
    /// by the actual prediction content (as per the schema).
    ///
    /// The possible formats are:
    ///
    /// * `jsonl`
    /// The JSON Lines format, where each prediction is a single line. Uses
    /// \[GcsDestination][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.gcs_destination\].
    ///
    /// * `csv`
    /// The CSV format, where each prediction is a single comma-separated line.
    /// The first line in the file is the header, containing comma-separated field
    /// names. Uses
    /// \[GcsDestination][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.gcs_destination\].
    ///
    /// * `bigquery`
    /// Each prediction is a single row in a BigQuery table, uses
    /// \[BigQueryDestination][google.cloud.aiplatform.v1.BatchPredictionJob.OutputConfig.bigquery_destination\]
    /// .
    ///
    ///
    /// If this Model doesn't support any of these formats it means it cannot be
    /// used with a \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\]. However, if it has
    /// \[supported_deployment_resources_types][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types\], it could serve online
    /// predictions by using \[PredictionService.Predict][google.cloud.aiplatform.v1.PredictionService.Predict\] or
    /// \[PredictionService.Explain][google.cloud.aiplatform.v1.PredictionService.Explain\].
    #[prost(string, repeated, tag = "12")]
    pub supported_output_storage_formats: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Output only. Timestamp when this Model was uploaded into Vertex AI.
    #[prost(message, optional, tag = "13")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Model was most recently updated.
    #[prost(message, optional, tag = "14")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. The pointers to DeployedModels created from this Model. Note that
    /// Model could have been deployed to Endpoints in different Locations.
    #[prost(message, repeated, tag = "15")]
    pub deployed_models: ::prost::alloc::vec::Vec<DeployedModelRef>,
    /// The default explanation specification for this Model.
    ///
    /// The Model can be used for [requesting
    /// explanation]\[PredictionService.Explain\] after being
    /// \[deployed][google.cloud.aiplatform.v1.EndpointService.DeployModel\] if it is populated.
    /// The Model can be used for [batch
    /// explanation]\[BatchPredictionJob.generate_explanation\] if it is populated.
    ///
    /// All fields of the explanation_spec can be overridden by
    /// \[explanation_spec][google.cloud.aiplatform.v1.DeployedModel.explanation_spec\] of
    /// \[DeployModelRequest.deployed_model][google.cloud.aiplatform.v1.DeployModelRequest.deployed_model\], or
    /// \[explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec\] of
    /// \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\].
    ///
    /// If the default explanation specification is not set for this Model, this
    /// Model can still be used for [requesting
    /// explanation]\[PredictionService.Explain\] by setting
    /// \[explanation_spec][google.cloud.aiplatform.v1.DeployedModel.explanation_spec\] of
    /// \[DeployModelRequest.deployed_model][google.cloud.aiplatform.v1.DeployModelRequest.deployed_model\] and for [batch
    /// explanation]\[BatchPredictionJob.generate_explanation\] by setting
    /// \[explanation_spec][google.cloud.aiplatform.v1.BatchPredictionJob.explanation_spec\] of
    /// \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\].
    #[prost(message, optional, tag = "23")]
    pub explanation_spec: ::core::option::Option<ExplanationSpec>,
    /// Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "16")]
    pub etag: ::prost::alloc::string::String,
    /// The labels with user-defined metadata to organize your Models.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    #[prost(map = "string, string", tag = "17")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Customer-managed encryption key spec for a Model. If set, this
    /// Model and all sub-resources of this Model will be secured by this key.
    #[prost(message, optional, tag = "24")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
}
/// Nested message and enum types in `Model`.
// Prost nests a message's inner types in a snake_case module named after the
// parent message (`Model` -> `model`).
pub mod model {
    /// Represents export format supported by the Model.
    /// All formats export to Google Cloud Storage.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct ExportFormat {
        /// Output only. The ID of the export format.
        /// The possible format IDs are:
        ///
        /// * `tflite`
        /// Used for Android mobile devices.
        ///
        /// * `edgetpu-tflite`
        /// Used for [Edge TPU](<https://cloud.google.com/edge-tpu/>) devices.
        ///
        /// * `tf-saved-model`
        /// A tensorflow model in SavedModel format.
        ///
        /// * `tf-js`
        /// A \[TensorFlow.js\](<https://www.tensorflow.org/js>) model that can be used
        /// in the browser and in Node.js using JavaScript.
        ///
        /// * `core-ml`
        /// Used for iOS mobile devices.
        ///
        /// * `custom-trained`
        /// A Model that was uploaded or trained by custom code.
        #[prost(string, tag = "1")]
        pub id: ::prost::alloc::string::String,
        /// Output only. The content of this Model that may be exported.
        // Stored as raw `i32` wire values; decode through
        // `export_format::ExportableContent`.
        #[prost(
            enumeration = "export_format::ExportableContent",
            repeated,
            packed = "false",
            tag = "2"
        )]
        pub exportable_contents: ::prost::alloc::vec::Vec<i32>,
    }
    /// Nested message and enum types in `ExportFormat`.
    pub mod export_format {
        /// The Model content that can be exported.
        #[derive(
            Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
        )]
        #[repr(i32)]
        pub enum ExportableContent {
            /// Should not be used.
            Unspecified = 0,
            /// Model artifact and any of its supported files. Will be exported to the
            /// location specified by the `artifactDestination` field of the
            /// \[ExportModelRequest.output_config][google.cloud.aiplatform.v1.ExportModelRequest.output_config\] object.
            Artifact = 1,
            /// The container image that is to be used when deploying this Model. Will
            /// be exported to the location specified by the `imageDestination` field
            /// of the \[ExportModelRequest.output_config][google.cloud.aiplatform.v1.ExportModelRequest.output_config\] object.
            Image = 2,
        }
    }
    /// Identifies a type of Model's prediction resources.
    // Discriminants are protobuf enum wire values; do not reorder or renumber.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum DeploymentResourcesType {
        /// Should not be used.
        Unspecified = 0,
        /// Resources that are dedicated to the \[DeployedModel][google.cloud.aiplatform.v1.DeployedModel\], and that need a
        /// higher degree of manual configuration.
        DedicatedResources = 1,
        /// Resources that to large degree are decided by Vertex AI, and require
        /// only a modest additional configuration.
        AutomaticResources = 2,
    }
}
/// Contains the schemata used in Model's predictions and explanations via
/// \[PredictionService.Predict][google.cloud.aiplatform.v1.PredictionService.Predict\], \[PredictionService.Explain][google.cloud.aiplatform.v1.PredictionService.Explain\] and
/// \[BatchPredictionJob][google.cloud.aiplatform.v1.BatchPredictionJob\].
// Protobuf message (prost). All three fields are plain string URIs pointing at
// OpenAPI schema files on Cloud Storage; tags 1-3 mirror the .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PredictSchemata {
    /// Immutable. Points to a YAML file stored on Google Cloud Storage describing the format
    /// of a single instance, which are used in \[PredictRequest.instances][google.cloud.aiplatform.v1.PredictRequest.instances\],
    /// \[ExplainRequest.instances][google.cloud.aiplatform.v1.ExplainRequest.instances\] and
    /// \[BatchPredictionJob.input_config][google.cloud.aiplatform.v1.BatchPredictionJob.input_config\].
    /// The schema is defined as an OpenAPI 3.0.2 [Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    /// AutoML Models always have this field populated by Vertex AI.
    /// Note: The URI given on output will be immutable and probably different,
    /// including the URI scheme, than the one given on input. The output URI will
    /// point to a location where the user only has a read access.
    #[prost(string, tag = "1")]
    pub instance_schema_uri: ::prost::alloc::string::String,
    /// Immutable. Points to a YAML file stored on Google Cloud Storage describing the
    /// parameters of prediction and explanation via
    /// \[PredictRequest.parameters][google.cloud.aiplatform.v1.PredictRequest.parameters\], \[ExplainRequest.parameters][google.cloud.aiplatform.v1.ExplainRequest.parameters\] and
    /// \[BatchPredictionJob.model_parameters][google.cloud.aiplatform.v1.BatchPredictionJob.model_parameters\].
    /// The schema is defined as an OpenAPI 3.0.2 [Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    /// AutoML Models always have this field populated by Vertex AI, if no
    /// parameters are supported, then it is set to an empty string.
    /// Note: The URI given on output will be immutable and probably different,
    /// including the URI scheme, than the one given on input. The output URI will
    /// point to a location where the user only has a read access.
    #[prost(string, tag = "2")]
    pub parameters_schema_uri: ::prost::alloc::string::String,
    /// Immutable. Points to a YAML file stored on Google Cloud Storage describing the format
    /// of a single prediction produced by this Model, which are returned via
    /// \[PredictResponse.predictions][google.cloud.aiplatform.v1.PredictResponse.predictions\], \[ExplainResponse.explanations][google.cloud.aiplatform.v1.ExplainResponse.explanations\], and
    /// \[BatchPredictionJob.output_config][google.cloud.aiplatform.v1.BatchPredictionJob.output_config\].
    /// The schema is defined as an OpenAPI 3.0.2 [Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    /// AutoML Models always have this field populated by Vertex AI.
    /// Note: The URI given on output will be immutable and probably different,
    /// including the URI scheme, than the one given on input. The output URI will
    /// point to a location where the user only has a read access.
    #[prost(string, tag = "3")]
    pub prediction_schema_uri: ::prost::alloc::string::String,
}
/// Specification of a container for serving predictions. Some fields in this
/// message correspond to fields in the [Kubernetes Container v1 core
/// specification](<https://v1-18.docs.kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#container-v1-core>).
// Protobuf message (prost); tags 1-7 mirror the upstream .proto field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelContainerSpec {
    /// Required. Immutable. URI of the Docker image to be used as the custom container for serving
    /// predictions. This URI must identify an image in Artifact Registry or
    /// Container Registry. Learn more about the [container publishing
    /// requirements](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#publishing>),
    /// including permissions requirements for the Vertex AI Service Agent.
    ///
    /// The container image is ingested upon \[ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel\], stored
    /// internally, and this original path is afterwards not used.
    ///
    /// To learn about the requirements for the Docker image itself, see
    /// [Custom container
    /// requirements](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#>).
    ///
    /// You can use the URI to one of Vertex AI's [pre-built container images for
    /// prediction](<https://cloud.google.com/vertex-ai/docs/predictions/pre-built-containers>)
    /// in this field.
    #[prost(string, tag = "1")]
    pub image_uri: ::prost::alloc::string::String,
    /// Immutable. Specifies the command that runs when the container starts. This overrides
    /// the container's
    /// \[ENTRYPOINT\](<https://docs.docker.com/engine/reference/builder/#entrypoint>).
    /// Specify this field as an array of executable and arguments, similar to a
    /// Docker `ENTRYPOINT`'s "exec" form, not its "shell" form.
    ///
    /// If you do not specify this field, then the container's `ENTRYPOINT` runs,
    /// in conjunction with the \[args][google.cloud.aiplatform.v1.ModelContainerSpec.args\] field or the
    /// container's \[`CMD`\](<https://docs.docker.com/engine/reference/builder/#cmd>),
    /// if either exists. If this field is not specified and the container does not
    /// have an `ENTRYPOINT`, then refer to the Docker documentation about [how
    /// `CMD` and `ENTRYPOINT`
    /// interact](<https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact>).
    ///
    /// If you specify this field, then you can also specify the `args` field to
    /// provide additional arguments for this command. However, if you specify this
    /// field, then the container's `CMD` is ignored. See the
    /// [Kubernetes documentation about how the
    /// `command` and `args` fields interact with a container's `ENTRYPOINT` and
    /// `CMD`](<https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes>).
    ///
    /// In this field, you can reference [environment variables set by Vertex
    /// AI](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables>)
    /// and environment variables set in the \[env][google.cloud.aiplatform.v1.ModelContainerSpec.env\] field.
    /// You cannot reference environment variables set in the Docker image. In
    /// order for environment variables to be expanded, reference them by using the
    /// following syntax:
    /// <code>$(<var>VARIABLE_NAME</var>)</code>
    /// Note that this differs from Bash variable expansion, which does not use
    /// parentheses. If a variable cannot be resolved, the reference in the input
    /// string is used unchanged. To avoid variable expansion, you can escape this
    /// syntax with `$$`; for example:
    /// <code>$$(<var>VARIABLE_NAME</var>)</code>
    /// This field corresponds to the `command` field of the Kubernetes Containers
    /// [v1 core
    /// API](<https://v1-18.docs.kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#container-v1-core>).
    #[prost(string, repeated, tag = "2")]
    pub command: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Immutable. Specifies arguments for the command that runs when the container starts.
    /// This overrides the container's
    /// \[`CMD`\](<https://docs.docker.com/engine/reference/builder/#cmd>). Specify
    /// this field as an array of executable and arguments, similar to a Docker
    /// `CMD`'s "default parameters" form.
    ///
    /// If you don't specify this field but do specify the
    /// \[command][google.cloud.aiplatform.v1.ModelContainerSpec.command\] field, then the command from the
    /// `command` field runs without any additional arguments. See the
    /// [Kubernetes documentation about how the
    /// `command` and `args` fields interact with a container's `ENTRYPOINT` and
    /// `CMD`](<https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes>).
    ///
    /// If you don't specify this field and don't specify the `command` field,
    /// then the container's
    /// \[`ENTRYPOINT`\](<https://docs.docker.com/engine/reference/builder/#cmd>) and
    /// `CMD` determine what runs based on their default behavior. See the Docker
    /// documentation about [how `CMD` and `ENTRYPOINT`
    /// interact](<https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact>).
    ///
    /// In this field, you can reference [environment variables
    /// set by Vertex
    /// AI](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables>)
    /// and environment variables set in the \[env][google.cloud.aiplatform.v1.ModelContainerSpec.env\] field.
    /// You cannot reference environment variables set in the Docker image. In
    /// order for environment variables to be expanded, reference them by using the
    /// following syntax:
    /// <code>$(<var>VARIABLE_NAME</var>)</code>
    /// Note that this differs from Bash variable expansion, which does not use
    /// parentheses. If a variable cannot be resolved, the reference in the input
    /// string is used unchanged. To avoid variable expansion, you can escape this
    /// syntax with `$$`; for example:
    /// <code>$$(<var>VARIABLE_NAME</var>)</code>
    /// This field corresponds to the `args` field of the Kubernetes Containers
    /// [v1 core
    /// API](<https://v1-18.docs.kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#container-v1-core>).
    #[prost(string, repeated, tag = "3")]
    pub args: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Immutable. List of environment variables to set in the container. After the container
    /// starts running, code running in the container can read these environment
    /// variables.
    ///
    /// Additionally, the \[command][google.cloud.aiplatform.v1.ModelContainerSpec.command\] and
    /// \[args][google.cloud.aiplatform.v1.ModelContainerSpec.args\] fields can reference these variables. Later
    /// entries in this list can also reference earlier entries. For example, the
    /// following example sets the variable `VAR_2` to have the value `foo bar`:
    ///
    /// ```json
    /// [
    ///     {
    ///         "name": "VAR_1",
    ///         "value": "foo"
    ///     },
    ///     {
    ///         "name": "VAR_2",
    ///         "value": "$(VAR_1) bar"
    ///     }
    /// ]
    /// ```
    ///
    /// If you switch the order of the variables in the example, then the expansion
    /// does not occur.
    ///
    /// This field corresponds to the `env` field of the Kubernetes Containers
    /// [v1 core
    /// API](<https://v1-18.docs.kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#container-v1-core>).
    #[prost(message, repeated, tag = "4")]
    pub env: ::prost::alloc::vec::Vec<EnvVar>,
    /// Immutable. List of ports to expose from the container. Vertex AI sends any
    /// prediction requests that it receives to the first port on this list. Vertex
    /// AI also sends
    /// [liveness and health
    /// checks](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#liveness>)
    /// to this port.
    ///
    /// If you do not specify this field, it defaults to following value:
    ///
    /// ```json
    /// [
    ///     {
    ///         "containerPort": 8080
    ///     }
    /// ]
    /// ```
    ///
    /// Vertex AI does not use ports other than the first one listed. This field
    /// corresponds to the `ports` field of the Kubernetes Containers
    /// [v1 core
    /// API](<https://v1-18.docs.kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#container-v1-core>).
    #[prost(message, repeated, tag = "5")]
    pub ports: ::prost::alloc::vec::Vec<Port>,
    /// Immutable. HTTP path on the container to send prediction requests to. Vertex AI
    /// forwards requests sent using
    /// \[projects.locations.endpoints.predict][google.cloud.aiplatform.v1.PredictionService.Predict\] to this
    /// path on the container's IP address and port. Vertex AI then returns the
    /// container's response in the API response.
    ///
    /// For example, if you set this field to `/foo`, then when Vertex AI
    /// receives a prediction request, it forwards the request body in a POST
    /// request to the `/foo` path on the port of your container specified by the
    /// first value of this `ModelContainerSpec`'s
    /// \[ports][google.cloud.aiplatform.v1.ModelContainerSpec.ports\] field.
    ///
    /// If you don't specify this field, it defaults to the following value when
    /// you [deploy this Model to an Endpoint]\[google.cloud.aiplatform.v1.EndpointService.DeployModel\]:
    /// <code>/v1/endpoints/<var>ENDPOINT</var>/deployedModels/<var>DEPLOYED_MODEL</var>:predict</code>
    /// The placeholders in this value are replaced as follows:
    ///
    /// * <var>ENDPOINT</var>: The last segment (following `endpoints/`)of the
    /// Endpoint.name][] field of the Endpoint where this Model has been
    /// deployed. (Vertex AI makes this value available to your container code
    /// as the [`AIP_ENDPOINT_ID` environment
    /// variable](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables>).)
    ///
    /// * <var>DEPLOYED_MODEL</var>: \[DeployedModel.id][google.cloud.aiplatform.v1.DeployedModel.id\] of the `DeployedModel`.
    /// (Vertex AI makes this value available to your container code
    /// as the [`AIP_DEPLOYED_MODEL_ID` environment
    /// variable](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables>).)
    #[prost(string, tag = "6")]
    pub predict_route: ::prost::alloc::string::String,
    /// Immutable. HTTP path on the container to send health checks to. Vertex AI
    /// intermittently sends GET requests to this path on the container's IP
    /// address and port to check that the container is healthy. Read more about
    /// [health
    /// checks](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#health>).
    ///
    /// For example, if you set this field to `/bar`, then Vertex AI
    /// intermittently sends a GET request to the `/bar` path on the port of your
    /// container specified by the first value of this `ModelContainerSpec`'s
    /// \[ports][google.cloud.aiplatform.v1.ModelContainerSpec.ports\] field.
    ///
    /// If you don't specify this field, it defaults to the following value when
    /// you [deploy this Model to an Endpoint]\[google.cloud.aiplatform.v1.EndpointService.DeployModel\]:
    /// <code>/v1/endpoints/<var>ENDPOINT</var>/deployedModels/<var>DEPLOYED_MODEL</var>:predict</code>
    /// The placeholders in this value are replaced as follows:
    ///
    /// * <var>ENDPOINT</var>: The last segment (following `endpoints/`)of the
    /// Endpoint.name][] field of the Endpoint where this Model has been
    /// deployed. (Vertex AI makes this value available to your container code
    /// as the [`AIP_ENDPOINT_ID` environment
    /// variable](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables>).)
    ///
    /// * <var>DEPLOYED_MODEL</var>: \[DeployedModel.id][google.cloud.aiplatform.v1.DeployedModel.id\] of the `DeployedModel`.
    /// (Vertex AI makes this value available to your container code as the
    /// [`AIP_DEPLOYED_MODEL_ID` environment
    /// variable](<https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables>).)
    // NOTE(review): the doc above says the health-check default path ends in
    // ":predict" — this mirrors the upstream .proto comment verbatim, but looks
    // like a copy of the predict_route default; verify against service behavior
    // before relying on it.
    #[prost(string, tag = "7")]
    pub health_route: ::prost::alloc::string::String,
}
/// Represents a network port in a container.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Port {
    /// The number of the port to expose on the pod's IP address.
    /// Must be a valid port number, between 1 and 65535 inclusive.
    // Wire tag is 3 (not 1): prost tags follow the upstream .proto field
    // numbers, which need not start at 1.
    #[prost(int32, tag = "3")]
    pub container_port: i32,
}
/// Describes the state of a pipeline.
// Protobuf enum (prost): the explicit discriminants are the wire values of the
// PIPELINE_STATE_* proto enum (see the variant docs below) and must not be
// reordered or renumbered.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum PipelineState {
    /// The pipeline state is unspecified.
    Unspecified = 0,
    /// The pipeline has been created or resumed, and processing has not yet
    /// begun.
    Queued = 1,
    /// The service is preparing to run the pipeline.
    Pending = 2,
    /// The pipeline is in progress.
    Running = 3,
    /// The pipeline completed successfully.
    Succeeded = 4,
    /// The pipeline failed.
    Failed = 5,
    /// The pipeline is being cancelled. From this state, the pipeline may only go
    /// to either PIPELINE_STATE_SUCCEEDED, PIPELINE_STATE_FAILED or
    /// PIPELINE_STATE_CANCELLED.
    Cancelling = 6,
    /// The pipeline has been cancelled.
    Cancelled = 7,
    /// The pipeline has been stopped, and can be resumed.
    Paused = 8,
}
/// The TrainingPipeline orchestrates tasks associated with training a Model. It
/// always executes the training task, and optionally may also
/// export data from Vertex AI's Dataset which becomes the training input,
/// \[upload][google.cloud.aiplatform.v1.ModelService.UploadModel\] the Model to Vertex AI, and evaluate the
/// Model.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TrainingPipeline {
    /// Output only. Resource name of the TrainingPipeline.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The user-defined name of this TrainingPipeline.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Specifies Vertex AI owned input data that may be used for training the
    /// Model. The TrainingPipeline's \[training_task_definition][google.cloud.aiplatform.v1.TrainingPipeline.training_task_definition\] should make
    /// clear whether this config is used and if there are any special requirements
    /// on how it should be filled. If nothing about this config is mentioned in
    /// the \[training_task_definition][google.cloud.aiplatform.v1.TrainingPipeline.training_task_definition\], then it should be assumed that the
    /// TrainingPipeline does not depend on this configuration.
    #[prost(message, optional, tag = "3")]
    pub input_data_config: ::core::option::Option<InputDataConfig>,
    /// Required. A Google Cloud Storage path to the YAML file that defines the training task
    /// which is responsible for producing the model artifact, and may also include
    /// additional auxiliary work.
    /// The definition files that can be used here are found in
    /// gs://google-cloud-aiplatform/schema/trainingjob/definition/.
    /// Note: The URI given on output will be immutable and probably different,
    /// including the URI scheme, than the one given on input. The output URI will
    /// point to a location where the user only has a read access.
    #[prost(string, tag = "4")]
    pub training_task_definition: ::prost::alloc::string::String,
    /// Required. The training task's parameter(s), as specified in the
    /// \[training_task_definition][google.cloud.aiplatform.v1.TrainingPipeline.training_task_definition\]'s `inputs`.
    #[prost(message, optional, tag = "5")]
    pub training_task_inputs: ::core::option::Option<::prost_types::Value>,
    /// Output only. The metadata information as specified in the \[training_task_definition][google.cloud.aiplatform.v1.TrainingPipeline.training_task_definition\]'s
    /// `metadata`. This metadata is an auxiliary runtime and final information
    /// about the training task. While the pipeline is running this information is
    /// populated only at a best effort basis. Only present if the
    /// pipeline's \[training_task_definition][google.cloud.aiplatform.v1.TrainingPipeline.training_task_definition\] contains `metadata` object.
    #[prost(message, optional, tag = "6")]
    pub training_task_metadata: ::core::option::Option<::prost_types::Value>,
    /// Describes the Model that may be uploaded (via \[ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel\])
    /// by this TrainingPipeline. The TrainingPipeline's
    /// \[training_task_definition][google.cloud.aiplatform.v1.TrainingPipeline.training_task_definition\] should make clear whether this Model
    /// description should be populated, and if there are any special requirements
    /// regarding how it should be filled. If nothing is mentioned in the
    /// \[training_task_definition][google.cloud.aiplatform.v1.TrainingPipeline.training_task_definition\], then it should be assumed that this field
    /// should not be filled and the training task either uploads the Model without
    /// a need of this information, or that training task does not support
    /// uploading a Model as part of the pipeline.
    /// When the Pipeline's state becomes `PIPELINE_STATE_SUCCEEDED` and
    /// the trained Model had been uploaded into Vertex AI, then the
    /// model_to_upload's resource \[name][google.cloud.aiplatform.v1.Model.name\] is populated. The Model
    /// is always uploaded into the Project and Location in which this pipeline
    /// is.
    #[prost(message, optional, tag = "7")]
    pub model_to_upload: ::core::option::Option<Model>,
    /// Output only. The detailed state of the pipeline.
    /// Holds the raw `i32` wire value of the `PipelineState` enum.
    #[prost(enumeration = "PipelineState", tag = "9")]
    pub state: i32,
    /// Output only. Only populated when the pipeline's state is `PIPELINE_STATE_FAILED` or
    /// `PIPELINE_STATE_CANCELLED`.
    #[prost(message, optional, tag = "10")]
    pub error: ::core::option::Option<super::super::super::rpc::Status>,
    /// Output only. Time when the TrainingPipeline was created.
    #[prost(message, optional, tag = "11")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the TrainingPipeline for the first time entered the
    /// `PIPELINE_STATE_RUNNING` state.
    #[prost(message, optional, tag = "12")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the TrainingPipeline entered any of the following states:
    /// `PIPELINE_STATE_SUCCEEDED`, `PIPELINE_STATE_FAILED`,
    /// `PIPELINE_STATE_CANCELLED`.
    #[prost(message, optional, tag = "13")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the TrainingPipeline was most recently updated.
    #[prost(message, optional, tag = "14")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The labels with user-defined metadata to organize TrainingPipelines.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    #[prost(map = "string, string", tag = "15")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Customer-managed encryption key spec for a TrainingPipeline. If set, this
    /// TrainingPipeline will be secured by this key.
    ///
    /// Note: a Model trained by this TrainingPipeline is also secured by this
    /// key if the Model's own `encryption_spec` is not set separately.
    #[prost(message, optional, tag = "18")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
}
/// Specifies Vertex AI owned input data to be used for training, and
/// possibly evaluating, the Model.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct InputDataConfig {
    /// Required. The ID of the Dataset in the same Project and Location whose data will be
    /// used to train the Model. The Dataset must use schema compatible with
    /// Model being trained, and what is compatible should be described in the
    /// used TrainingPipeline's \[training_task_definition\]
    /// \[google.cloud.aiplatform.v1.TrainingPipeline.training_task_definition\].
    /// For tabular Datasets, all their data is exported to training, to pick
    /// and choose from.
    #[prost(string, tag = "1")]
    pub dataset_id: ::prost::alloc::string::String,
    /// Applicable only to Datasets that have DataItems and Annotations.
    ///
    /// A filter on Annotations of the Dataset. Only Annotations that both
    /// match this filter and belong to DataItems not ignored by the split method
    /// are used in respectively training, validation or test role, depending on
    /// the role of the DataItem they are on (for the auto-assigned that role is
    /// decided by Vertex AI). A filter with same syntax as the one used in
    /// \[ListAnnotations][google.cloud.aiplatform.v1.DatasetService.ListAnnotations\] may be used, but note
    /// here it filters across all Annotations of the Dataset, and not just within
    /// a single DataItem.
    #[prost(string, tag = "6")]
    pub annotations_filter: ::prost::alloc::string::String,
    /// Applicable only to custom training with Datasets that have DataItems and
    /// Annotations.
    ///
    /// Cloud Storage URI that points to a YAML file describing the annotation
    /// schema. The schema is defined as an OpenAPI 3.0.2 [Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    /// The schema files that can be used here are found in
    /// gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the
    /// chosen schema must be consistent with
    /// \[metadata][google.cloud.aiplatform.v1.Dataset.metadata_schema_uri\] of the Dataset specified by
    /// \[dataset_id][google.cloud.aiplatform.v1.InputDataConfig.dataset_id\].
    ///
    /// Only Annotations that both match this schema and belong to DataItems not
    /// ignored by the split method are used in respectively training, validation
    /// or test role, depending on the role of the DataItem they are on.
    ///
    /// When used in conjunction with \[annotations_filter][google.cloud.aiplatform.v1.InputDataConfig.annotations_filter\], the Annotations used
    /// for training are filtered by both \[annotations_filter][google.cloud.aiplatform.v1.InputDataConfig.annotations_filter\] and
    /// \[annotation_schema_uri][google.cloud.aiplatform.v1.InputDataConfig.annotation_schema_uri\].
    #[prost(string, tag = "9")]
    pub annotation_schema_uri: ::prost::alloc::string::String,
    /// The instructions how the input data should be split between the
    /// training, validation and test sets.
    /// If no split type is provided, the \[fraction_split][google.cloud.aiplatform.v1.InputDataConfig.fraction_split\] is used by default.
    /// At most one split variant can be set (proto `oneof`).
    #[prost(oneof = "input_data_config::Split", tags = "2, 3, 4, 5")]
    pub split: ::core::option::Option<input_data_config::Split>,
    /// Only applicable to Custom and Hyperparameter Tuning TrainingPipelines.
    ///
    /// The destination of the training data to be written to.
    ///
    /// Supported destination file formats:
    /// * For non-tabular data: "jsonl".
    /// * For tabular data: "csv" and "bigquery".
    ///
    /// The following Vertex AI environment variables are passed to containers
    /// or python modules of the training task when this field is set:
    ///
    /// * AIP_DATA_FORMAT : Exported data format.
    /// * AIP_TRAINING_DATA_URI : Sharded exported training data uris.
    /// * AIP_VALIDATION_DATA_URI : Sharded exported validation data uris.
    /// * AIP_TEST_DATA_URI : Sharded exported test data uris.
    #[prost(oneof = "input_data_config::Destination", tags = "8, 10")]
    pub destination: ::core::option::Option<input_data_config::Destination>,
}
/// Nested message and enum types in `InputDataConfig`.
pub mod input_data_config {
    /// The instructions how the input data should be split between the
    /// training, validation and test sets.
    /// If no split type is provided, the \[fraction_split][google.cloud.aiplatform.v1.InputDataConfig.fraction_split\] is used by default.
    ///
    /// Each variant corresponds to one field of the proto `oneof` group.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Split {
        /// Split based on fractions defining the size of each set.
        #[prost(message, tag = "2")]
        FractionSplit(super::FractionSplit),
        /// Split based on the provided filters for each set.
        #[prost(message, tag = "3")]
        FilterSplit(super::FilterSplit),
        /// Supported only for tabular Datasets.
        ///
        /// Split based on a predefined key.
        #[prost(message, tag = "4")]
        PredefinedSplit(super::PredefinedSplit),
        /// Supported only for tabular Datasets.
        ///
        /// Split based on the timestamp of the input data pieces.
        #[prost(message, tag = "5")]
        TimestampSplit(super::TimestampSplit),
    }
    /// Only applicable to Custom and Hyperparameter Tuning TrainingPipelines.
    ///
    /// The destination of the training data to be written to.
    ///
    /// Supported destination file formats:
    /// * For non-tabular data: "jsonl".
    /// * For tabular data: "csv" and "bigquery".
    ///
    /// The following Vertex AI environment variables are passed to containers
    /// or python modules of the training task when this field is set:
    ///
    /// * AIP_DATA_FORMAT : Exported data format.
    /// * AIP_TRAINING_DATA_URI : Sharded exported training data uris.
    /// * AIP_VALIDATION_DATA_URI : Sharded exported validation data uris.
    /// * AIP_TEST_DATA_URI : Sharded exported test data uris.
    ///
    /// Each variant corresponds to one field of the proto `oneof` group.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Destination {
        /// The Cloud Storage location where the training data is to be
        /// written to. In the given directory a new directory is created with
        /// name:
        /// `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
        /// where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
        /// All training input data is written into that directory.
        ///
        /// The Vertex AI environment variables representing Cloud Storage
        /// data URIs are represented in the Cloud Storage wildcard
        /// format to support sharded data. e.g.: "gs://.../training-*.jsonl"
        ///
        /// * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
        /// * AIP_TRAINING_DATA_URI =
        /// "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
        ///
        /// * AIP_VALIDATION_DATA_URI =
        /// "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
        ///
        /// * AIP_TEST_DATA_URI =
        /// "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/test-*.${AIP_DATA_FORMAT}"
        #[prost(message, tag = "8")]
        GcsDestination(super::GcsDestination),
        /// Only applicable to custom training with tabular Dataset with BigQuery
        /// source.
        ///
        /// The BigQuery project location where the training data is to be written
        /// to. In the given project a new dataset is created with name
        /// `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
        /// where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
        /// input data is written into that dataset. In the dataset three
        /// tables are created, `training`, `validation` and `test`.
        ///
        /// * AIP_DATA_FORMAT = "bigquery".
        /// * AIP_TRAINING_DATA_URI =
        /// "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
        ///
        /// * AIP_VALIDATION_DATA_URI =
        /// "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.validation"
        ///
        /// * AIP_TEST_DATA_URI =
        /// "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.test"
        #[prost(message, tag = "10")]
        BigqueryDestination(super::BigQueryDestination),
    }
}
/// Assigns the input data to training, validation, and test sets as per the
/// given fractions. Any of `training_fraction`, `validation_fraction` and
/// `test_fraction` may optionally be provided; their sum must be at most 1. If
/// the provided ones sum to less than 1, the remainder is assigned to sets as
/// decided by Vertex AI. If none of the fractions are set, by default roughly
/// 80% of data is used for training, 10% for validation, and 10% for test.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FractionSplit {
    /// The fraction of the input data that is to be used to train the Model.
    #[prost(double, tag = "1")]
    pub training_fraction: f64,
    /// The fraction of the input data that is to be used to validate the Model.
    #[prost(double, tag = "2")]
    pub validation_fraction: f64,
    /// The fraction of the input data that is to be used to evaluate the Model.
    #[prost(double, tag = "3")]
    pub test_fraction: f64,
}
/// Assigns input data to training, validation, and test sets based on the given
/// filters, data pieces not matched by any filter are ignored. Currently only
/// supported for Datasets containing DataItems.
/// To make one of the filters match nothing, set it to '-' (the minus sign).
///
/// Supported only for unstructured Datasets.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FilterSplit {
    /// Required. A filter on DataItems of the Dataset. DataItems that match
    /// this filter are used to train the Model. A filter with same syntax
    /// as the one used in \[DatasetService.ListDataItems][google.cloud.aiplatform.v1.DatasetService.ListDataItems\] may be used. If a
    /// single DataItem is matched by more than one of the FilterSplit filters,
    /// then it is assigned to the first set that applies to it in the
    /// training, validation, test order.
    #[prost(string, tag = "1")]
    pub training_filter: ::prost::alloc::string::String,
    /// Required. A filter on DataItems of the Dataset. DataItems that match
    /// this filter are used to validate the Model. A filter with same syntax
    /// as the one used in \[DatasetService.ListDataItems][google.cloud.aiplatform.v1.DatasetService.ListDataItems\] may be used. If a
    /// single DataItem is matched by more than one of the FilterSplit filters,
    /// then it is assigned to the first set that applies to it in the
    /// training, validation, test order.
    #[prost(string, tag = "2")]
    pub validation_filter: ::prost::alloc::string::String,
    /// Required. A filter on DataItems of the Dataset. DataItems that match
    /// this filter are used to test the Model. A filter with same syntax
    /// as the one used in \[DatasetService.ListDataItems][google.cloud.aiplatform.v1.DatasetService.ListDataItems\] may be used. If a
    /// single DataItem is matched by more than one of the FilterSplit filters,
    /// then it is assigned to the first set that applies to it in the
    /// training, validation, test order.
    #[prost(string, tag = "3")]
    pub test_filter: ::prost::alloc::string::String,
}
/// Assigns input data to training, validation, and test sets based on the
/// value of a provided key.
///
/// Supported only for tabular Datasets.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PredefinedSplit {
    /// Required. The key is a name of one of the Dataset's data columns.
    /// The value of the key (either the label's value or the value in the column)
    /// must be one of {`training`, `validation`, `test`}, and it defines to which
    /// set the given piece of data is assigned. If for a piece of data the key
    /// is not present or has an invalid value, that piece is ignored by the
    /// pipeline.
    #[prost(string, tag = "1")]
    pub key: ::prost::alloc::string::String,
}
/// Assigns input data to training, validation, and test sets based on
/// provided timestamps. The youngest data pieces are assigned to training set,
/// next to validation set, and the oldest to the test set.
///
/// Supported only for tabular Datasets.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TimestampSplit {
    /// The fraction of the input data that is to be used to train the Model.
    #[prost(double, tag = "1")]
    pub training_fraction: f64,
    /// The fraction of the input data that is to be used to validate the Model.
    #[prost(double, tag = "2")]
    pub validation_fraction: f64,
    /// The fraction of the input data that is to be used to evaluate the Model.
    #[prost(double, tag = "3")]
    pub test_fraction: f64,
    /// Required. The key is a name of one of the Dataset's data columns.
    /// The values of the key (the values in the column) must be in RFC 3339
    /// `date-time` format, where `time-offset` = `"Z"`
    /// (e.g. 1985-04-12T23:20:50.52Z). If for a piece of data the key is not
    /// present or has an invalid value, that piece is ignored by the pipeline.
    #[prost(string, tag = "4")]
    pub key: ::prost::alloc::string::String,
}
/// Request message for \[DatasetService.CreateDataset][google.cloud.aiplatform.v1.DatasetService.CreateDataset\].
/// Both fields are required.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateDatasetRequest {
    /// Required. The resource name of the Location to create the Dataset in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Dataset to create.
    #[prost(message, optional, tag = "2")]
    pub dataset: ::core::option::Option<Dataset>,
}
/// Runtime operation information for \[DatasetService.CreateDataset][google.cloud.aiplatform.v1.DatasetService.CreateDataset\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateDatasetOperationMetadata {
    /// The generic operation metadata.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetDatasetRequest {
    /// Required. The name of the Dataset resource.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Optional. Mask specifying which fields to read.
    #[prost(message, optional, tag = "2")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for \[DatasetService.UpdateDataset][google.cloud.aiplatform.v1.DatasetService.UpdateDataset\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateDatasetRequest {
    /// Required. The Dataset which replaces the resource on the server.
    #[prost(message, optional, tag = "1")]
    pub dataset: ::core::option::Option<Dataset>,
    /// Required. The update mask that selects which fields of the Dataset are updated.
    /// For the `FieldMask` definition, see \[google.protobuf.FieldMask][google.protobuf.FieldMask\].
    /// Updatable fields:
    ///
    /// * `display_name`
    /// * `description`
    /// * `labels`
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for \[DatasetService.ListDatasets][google.cloud.aiplatform.v1.DatasetService.ListDatasets\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListDatasetsRequest {
    /// Required. The name of the Dataset's parent resource.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// An expression for filtering the results of the request. For field names
    /// both snake_case and camelCase are supported.
    ///
    /// * `display_name`: supports = and !=
    /// * `metadata_schema_uri`: supports = and !=
    /// * `labels` supports general map functions that is:
    ///   * `labels.key=value` - key:value equality
    ///   * `labels.key:*` or `labels:key` - key existence
    ///   * A key including a space must be quoted. `labels."a key"`.
    ///
    /// Some examples:
    /// * `displayName="myDisplayName"`
    /// * `labels.myKey="myValue"`
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via the previous response's `next_page_token`.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// A comma-separated list of fields to order by, sorted in ascending order.
    /// Use "desc" after a field name for descending.
    /// Supported fields:
    /// * `display_name`
    /// * `create_time`
    /// * `update_time`
    #[prost(string, tag = "6")]
    pub order_by: ::prost::alloc::string::String,
}
/// Response message for \[DatasetService.ListDatasets][google.cloud.aiplatform.v1.DatasetService.ListDatasets\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListDatasetsResponse {
    /// A list of Datasets that matches the specified filter in the request.
    #[prost(message, repeated, tag = "1")]
    pub datasets: ::prost::alloc::vec::Vec<Dataset>,
    /// The standard List next-page token.
    /// Pass this value as `page_token` of a subsequent request to retrieve the next page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[DatasetService.DeleteDataset][google.cloud.aiplatform.v1.DatasetService.DeleteDataset\].
/// Deleting a Dataset is a long-running operation; the RPC returns a
/// `google.longrunning.Operation`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteDatasetRequest {
    /// Required. The resource name of the Dataset to delete.
    /// Format:
    /// `projects/{project}/locations/{location}/datasets/{dataset}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[DatasetService.ImportData][google.cloud.aiplatform.v1.DatasetService.ImportData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportDataRequest {
    /// Required. The name of the Dataset resource into which the data is imported.
    /// Format:
    /// `projects/{project}/locations/{location}/datasets/{dataset}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The desired input locations. The contents of all input locations will be
    /// imported in one batch.
    #[prost(message, repeated, tag = "2")]
    pub import_configs: ::prost::alloc::vec::Vec<ImportDataConfig>,
}
/// Response message for \[DatasetService.ImportData][google.cloud.aiplatform.v1.DatasetService.ImportData\].
/// This message carries no fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportDataResponse {}
/// Runtime operation information for \[DatasetService.ImportData][google.cloud.aiplatform.v1.DatasetService.ImportData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportDataOperationMetadata {
    /// The generic (common) part of the operation metadata.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[DatasetService.ExportData][google.cloud.aiplatform.v1.DatasetService.ExportData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportDataRequest {
    /// Required. The name of the Dataset resource from which the data is exported.
    /// Format:
    /// `projects/{project}/locations/{location}/datasets/{dataset}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The desired output location.
    #[prost(message, optional, tag = "2")]
    pub export_config: ::core::option::Option<ExportDataConfig>,
}
/// Response message for \[DatasetService.ExportData][google.cloud.aiplatform.v1.DatasetService.ExportData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportDataResponse {
    /// Paths of all of the files that are exported in this export operation.
    #[prost(string, repeated, tag = "1")]
    pub exported_files: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Runtime operation information for \[DatasetService.ExportData][google.cloud.aiplatform.v1.DatasetService.ExportData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportDataOperationMetadata {
    /// The common part of the operation metadata.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
    /// A Google Cloud Storage directory whose path ends with '/'. The exported
    /// data is stored in the directory.
    #[prost(string, tag = "2")]
    pub gcs_output_directory: ::prost::alloc::string::String,
}
/// Request message for \[DatasetService.ListDataItems][google.cloud.aiplatform.v1.DatasetService.ListDataItems\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListDataItemsRequest {
    /// Required. The resource name of the Dataset to list DataItems from.
    /// Format:
    /// `projects/{project}/locations/{location}/datasets/{dataset}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via the previous response's `next_page_token`.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// A comma-separated list of fields to order by, sorted in ascending order.
    /// Use "desc" after a field name for descending.
    #[prost(string, tag = "6")]
    pub order_by: ::prost::alloc::string::String,
}
/// Response message for \[DatasetService.ListDataItems][google.cloud.aiplatform.v1.DatasetService.ListDataItems\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListDataItemsResponse {
    /// A list of DataItems that matches the specified filter in the request.
    #[prost(message, repeated, tag = "1")]
    pub data_items: ::prost::alloc::vec::Vec<DataItem>,
    /// The standard List next-page token.
    /// Pass this value as `page_token` of a subsequent request to retrieve the next page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[DatasetService.GetAnnotationSpec][google.cloud.aiplatform.v1.DatasetService.GetAnnotationSpec\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetAnnotationSpecRequest {
    /// Required. The name of the AnnotationSpec resource.
    /// Format:
    /// `projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Optional. Mask specifying which fields to read.
    #[prost(message, optional, tag = "2")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for \[DatasetService.ListAnnotations][google.cloud.aiplatform.v1.DatasetService.ListAnnotations\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListAnnotationsRequest {
    /// Required. The resource name of the DataItem to list Annotations from.
    /// Format:
    /// `projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via the previous response's `next_page_token`.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// A comma-separated list of fields to order by, sorted in ascending order.
    /// Use "desc" after a field name for descending.
    #[prost(string, tag = "6")]
    pub order_by: ::prost::alloc::string::String,
}
/// Response message for \[DatasetService.ListAnnotations][google.cloud.aiplatform.v1.DatasetService.ListAnnotations\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListAnnotationsResponse {
    /// A list of Annotations that matches the specified filter in the request.
    #[prost(message, repeated, tag = "1")]
    pub annotations: ::prost::alloc::vec::Vec<Annotation>,
    /// The standard List next-page token.
    /// Pass this value as `page_token` of a subsequent request to retrieve the next page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
#[doc = r" Generated client implementations."]
pub mod dataset_service_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
#[doc = " The service that handles the CRUD of Vertex AI Dataset and its child"]
#[doc = " resources."]
#[derive(Debug, Clone)]
pub struct DatasetServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl<T> DatasetServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> DatasetServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T: tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
Into<StdError> + Send + Sync,
{
DatasetServiceClient::new(InterceptedService::new(inner, interceptor))
}
#[doc = r" Compress requests with `gzip`."]
#[doc = r""]
#[doc = r" This requires the server to support it otherwise it might respond with an"]
#[doc = r" error."]
pub fn send_gzip(mut self) -> Self {
self.inner = self.inner.send_gzip();
self
}
#[doc = r" Enable decompressing responses with `gzip`."]
pub fn accept_gzip(mut self) -> Self {
self.inner = self.inner.accept_gzip();
self
}
#[doc = " Creates a Dataset."]
pub async fn create_dataset(
&mut self,
request: impl tonic::IntoRequest<super::CreateDatasetRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/CreateDataset",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Gets a Dataset."]
pub async fn get_dataset(
&mut self,
request: impl tonic::IntoRequest<super::GetDatasetRequest>,
) -> Result<tonic::Response<super::Dataset>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/GetDataset",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Updates a Dataset."]
pub async fn update_dataset(
&mut self,
request: impl tonic::IntoRequest<super::UpdateDatasetRequest>,
) -> Result<tonic::Response<super::Dataset>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/UpdateDataset",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Lists Datasets in a Location."]
pub async fn list_datasets(
&mut self,
request: impl tonic::IntoRequest<super::ListDatasetsRequest>,
) -> Result<tonic::Response<super::ListDatasetsResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/ListDatasets",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Deletes a Dataset."]
pub async fn delete_dataset(
&mut self,
request: impl tonic::IntoRequest<super::DeleteDatasetRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/DeleteDataset",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Imports data into a Dataset."]
pub async fn import_data(
&mut self,
request: impl tonic::IntoRequest<super::ImportDataRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/ImportData",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Exports data from a Dataset."]
pub async fn export_data(
&mut self,
request: impl tonic::IntoRequest<super::ExportDataRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/ExportData",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Lists DataItems in a Dataset."]
pub async fn list_data_items(
&mut self,
request: impl tonic::IntoRequest<super::ListDataItemsRequest>,
) -> Result<tonic::Response<super::ListDataItemsResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/ListDataItems",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Gets an AnnotationSpec."]
pub async fn get_annotation_spec(
&mut self,
request: impl tonic::IntoRequest<super::GetAnnotationSpecRequest>,
) -> Result<tonic::Response<super::AnnotationSpec>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/GetAnnotationSpec",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Lists Annotations belongs to a dataitem"]
pub async fn list_annotations(
&mut self,
request: impl tonic::IntoRequest<super::ListAnnotationsRequest>,
) -> Result<tonic::Response<super::ListAnnotationsResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.DatasetService/ListAnnotations",
);
self.inner.unary(request.into_request(), path, codec).await
}
}
}
/// Points to a DeployedIndex.
// NOTE(review): prost-generated message; the #[prost(tag = ...)] numbers define
// the protobuf wire format and must stay in sync with the .proto — do not renumber.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployedIndexRef {
    /// Immutable. A resource name of the IndexEndpoint.
    #[prost(string, tag = "1")]
    pub index_endpoint: ::prost::alloc::string::String,
    /// Immutable. The ID of the DeployedIndex in the above IndexEndpoint.
    #[prost(string, tag = "2")]
    pub deployed_index_id: ::prost::alloc::string::String,
}
/// Stats and Anomaly generated at specific timestamp for specific Feature.
/// The start_time and end_time are used to define the time range of the dataset
/// that current stats belongs to, e.g. prediction traffic is bucketed into
/// prediction datasets by time window. If the Dataset is not defined by time
/// window, start_time = end_time. Timestamp of the stats and anomalies always
/// refers to end_time. Raw stats and anomalies are stored in stats_uri or
/// anomaly_uri in the tensorflow defined protos. Field data_stats contains
/// almost identical information with the raw stats in Vertex AI
/// defined proto, for UI to display.
// NOTE(review): prost-generated message; tag numbers are non-contiguous
// (2, 6 absent) — presumably reserved/removed in the .proto; do not reuse them.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FeatureStatsAnomaly {
    /// Feature importance score, only populated when cross-feature monitoring is
    /// enabled. For now only used to represent feature attribution score within
    /// range [0, 1] for
    /// \[ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_SKEW][google.cloud.aiplatform.v1.ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_SKEW\] and
    /// \[ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_DRIFT][google.cloud.aiplatform.v1.ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_DRIFT\].
    #[prost(double, tag = "1")]
    pub score: f64,
    /// Path of the stats file for current feature values in Cloud Storage bucket.
    /// Format: gs://<bucket_name>/<object_name>/stats.
    /// Example: gs://monitoring_bucket/feature_name/stats.
    /// Stats are stored as binary format with Protobuf message
    /// \[tensorflow.metadata.v0.FeatureNameStatistics\](<https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/statistics.proto>).
    #[prost(string, tag = "3")]
    pub stats_uri: ::prost::alloc::string::String,
    /// Path of the anomaly file for current feature values in Cloud Storage
    /// bucket.
    /// Format: gs://<bucket_name>/<object_name>/anomalies.
    /// Example: gs://monitoring_bucket/feature_name/anomalies.
    /// Stats are stored as binary format with Protobuf message
    /// Anoamlies are stored as binary format with Protobuf message
    /// \[tensorflow.metadata.v0.AnomalyInfo\]
    /// (<https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/anomalies.proto>).
    #[prost(string, tag = "4")]
    pub anomaly_uri: ::prost::alloc::string::String,
    /// Deviation from the current stats to baseline stats.
    /// 1. For categorical feature, the distribution distance is calculated by
    /// L-inifinity norm.
    /// 2. For numerical feature, the distribution distance is calculated by
    /// Jensen–Shannon divergence.
    #[prost(double, tag = "5")]
    pub distribution_deviation: f64,
    /// This is the threshold used when detecting anomalies.
    /// The threshold can be changed by user, so this one might be different from
    /// \[ThresholdConfig.value][google.cloud.aiplatform.v1.ThresholdConfig.value\].
    #[prost(double, tag = "9")]
    pub anomaly_detection_threshold: f64,
    /// The start timestamp of window where stats were generated.
    /// For objectives where time window doesn't make sense (e.g. Featurestore
    /// Snapshot Monitoring), start_time is only used to indicate the monitoring
    /// intervals, so it always equals to (end_time - monitoring_interval).
    #[prost(message, optional, tag = "7")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The end timestamp of window where stats were generated.
    /// For objectives where time window doesn't make sense (e.g. Featurestore
    /// Snapshot Monitoring), end_time indicates the timestamp of the data used to
    /// generate stats (e.g. timestamp we take snapshots for feature values).
    #[prost(message, optional, tag = "8")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
}
/// Represents a job that runs periodically to monitor the deployed models in an
/// endpoint. It will analyze the logged training & prediction data to detect any
/// abnormal behaviors.
// NOTE(review): prost-generated message; the #[prost(tag = ...)] numbers define
// the protobuf wire format and must stay in sync with the .proto — do not renumber.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelDeploymentMonitoringJob {
    /// Output only. Resource name of a ModelDeploymentMonitoringJob.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The user-defined name of the ModelDeploymentMonitoringJob.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    /// Display name of a ModelDeploymentMonitoringJob.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Required. Endpoint resource name.
    /// Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`
    #[prost(string, tag = "3")]
    pub endpoint: ::prost::alloc::string::String,
    /// Output only. The detailed state of the monitoring job.
    /// When the job is still creating, the state will be 'PENDING'.
    /// Once the job is successfully created, the state will be 'RUNNING'.
    /// Pause the job, the state will be 'PAUSED'.
    /// Resume the job, the state will return to 'RUNNING'.
    // Stored as i32 (prost enumeration convention); interpret via `JobState`.
    #[prost(enumeration = "JobState", tag = "4")]
    pub state: i32,
    /// Output only. Schedule state when the monitoring job is in Running state.
    #[prost(enumeration = "model_deployment_monitoring_job::MonitoringScheduleState", tag = "5")]
    pub schedule_state: i32,
    /// Required. The config for monitoring objectives. This is a per DeployedModel config.
    /// Each DeployedModel needs to be configured separately.
    #[prost(message, repeated, tag = "6")]
    pub model_deployment_monitoring_objective_configs:
        ::prost::alloc::vec::Vec<ModelDeploymentMonitoringObjectiveConfig>,
    /// Required. Schedule config for running the monitoring job.
    #[prost(message, optional, tag = "7")]
    pub model_deployment_monitoring_schedule_config:
        ::core::option::Option<ModelDeploymentMonitoringScheduleConfig>,
    /// Required. Sample Strategy for logging.
    #[prost(message, optional, tag = "8")]
    pub logging_sampling_strategy: ::core::option::Option<SamplingStrategy>,
    /// Alert config for model monitoring.
    #[prost(message, optional, tag = "15")]
    pub model_monitoring_alert_config: ::core::option::Option<ModelMonitoringAlertConfig>,
    /// YAML schema file uri describing the format of a single instance,
    /// which are given to format this Endpoint's prediction (and explanation).
    /// If not set, we will generate predict schema from collected predict
    /// requests.
    #[prost(string, tag = "9")]
    pub predict_instance_schema_uri: ::prost::alloc::string::String,
    /// Sample Predict instance, same format as \[PredictRequest.instances][google.cloud.aiplatform.v1.PredictRequest.instances\],
    /// this can be set as a replacement of
    /// \[ModelDeploymentMonitoringJob.predict_instance_schema_uri][google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.predict_instance_schema_uri\]. If not set,
    /// we will generate predict schema from collected predict requests.
    #[prost(message, optional, tag = "19")]
    pub sample_predict_instance: ::core::option::Option<::prost_types::Value>,
    /// YAML schema file uri describing the format of a single instance that you
    /// want Tensorflow Data Validation (TFDV) to analyze.
    ///
    /// If this field is empty, all the feature data types are inferred from
    /// \[predict_instance_schema_uri][google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.predict_instance_schema_uri\],
    /// meaning that TFDV will use the data in the exact format(data type) as
    /// prediction request/response.
    /// If there are any data type differences between predict instance and TFDV
    /// instance, this field can be used to override the schema.
    /// For models trained with Vertex AI, this field must be set as all the
    /// fields in predict instance formatted as string.
    #[prost(string, tag = "16")]
    pub analysis_instance_schema_uri: ::prost::alloc::string::String,
    /// Output only. The created bigquery tables for the job under customer project. Customer
    /// could do their own query & analysis. There could be 4 log tables in
    /// maximum:
    /// 1. Training data logging predict request/response
    /// 2. Serving data logging predict request/response
    #[prost(message, repeated, tag = "10")]
    pub bigquery_tables: ::prost::alloc::vec::Vec<ModelDeploymentMonitoringBigQueryTable>,
    /// The TTL of BigQuery tables in user projects which stores logs.
    /// A day is the basic unit of the TTL and we take the ceil of TTL/86400(a
    /// day). e.g. { second: 3600} indicates ttl = 1 day.
    #[prost(message, optional, tag = "17")]
    pub log_ttl: ::core::option::Option<::prost_types::Duration>,
    /// The labels with user-defined metadata to organize your
    /// ModelDeploymentMonitoringJob.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    #[prost(map = "string, string", tag = "11")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Output only. Timestamp when this ModelDeploymentMonitoringJob was created.
    #[prost(message, optional, tag = "12")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this ModelDeploymentMonitoringJob was updated most recently.
    #[prost(message, optional, tag = "13")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this monitoring pipeline will be scheduled to run for the
    /// next round.
    #[prost(message, optional, tag = "14")]
    pub next_schedule_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Stats anomalies base folder path.
    #[prost(message, optional, tag = "20")]
    pub stats_anomalies_base_directory: ::core::option::Option<GcsDestination>,
    /// Customer-managed encryption key spec for a ModelDeploymentMonitoringJob. If
    /// set, this ModelDeploymentMonitoringJob and all sub-resources of this
    /// ModelDeploymentMonitoringJob will be secured by this key.
    #[prost(message, optional, tag = "21")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
    /// If true, the scheduled monitoring pipeline status logs are sent to
    /// Google Cloud Logging. Please note the logs incur cost, which are subject to
    /// [Cloud Logging pricing](<https://cloud.google.com/logging#pricing>).
    #[prost(bool, tag = "22")]
    pub enable_monitoring_pipeline_logs: bool,
    /// Output only. Only populated when the job's state is `JOB_STATE_FAILED` or
    /// `JOB_STATE_CANCELLED`.
    #[prost(message, optional, tag = "23")]
    pub error: ::core::option::Option<super::super::super::rpc::Status>,
}
/// Nested message and enum types in `ModelDeploymentMonitoringJob`.
pub mod model_deployment_monitoring_job {
    /// The state to Specify the monitoring pipeline.
    // Mirrors the proto enum; discriminant values are part of the wire format.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum MonitoringScheduleState {
        /// Unspecified state.
        Unspecified = 0,
        /// The pipeline is picked up and wait to run.
        Pending = 1,
        /// The pipeline is offline and will be scheduled for next run.
        Offline = 2,
        /// The pipeline is running.
        Running = 3,
    }
}
/// ModelDeploymentMonitoringBigQueryTable specifies the BigQuery table name
/// as well as some information of the logs stored in this table.
// NOTE(review): prost-generated message; the #[prost(tag = ...)] numbers define
// the protobuf wire format and must stay in sync with the .proto — do not renumber.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelDeploymentMonitoringBigQueryTable {
    /// The source of log.
    // Stored as i32; interpret via the nested `LogSource` enumeration.
    #[prost(enumeration = "model_deployment_monitoring_big_query_table::LogSource", tag = "1")]
    pub log_source: i32,
    /// The type of log.
    // Stored as i32; interpret via the nested `LogType` enumeration.
    #[prost(enumeration = "model_deployment_monitoring_big_query_table::LogType", tag = "2")]
    pub log_type: i32,
    /// The created BigQuery table to store logs. Customer could do their own query
    /// & analysis. Format:
    /// `bq://<project_id>.model_deployment_monitoring_<endpoint_id>.<tolower(log_source)>_<tolower(log_type)>`
    #[prost(string, tag = "3")]
    pub bigquery_table_path: ::prost::alloc::string::String,
}
/// Nested message and enum types in `ModelDeploymentMonitoringBigQueryTable`.
pub mod model_deployment_monitoring_big_query_table {
    /// Indicates where does the log come from.
    // Mirrors the proto enum; discriminant values are part of the wire format.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum LogSource {
        /// Unspecified source.
        Unspecified = 0,
        /// Logs coming from Training dataset.
        Training = 1,
        /// Logs coming from Serving traffic.
        Serving = 2,
    }
    /// Indicates what type of traffic does the log belong to.
    // Mirrors the proto enum; discriminant values are part of the wire format.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum LogType {
        /// Unspecified type.
        Unspecified = 0,
        /// Predict logs.
        Predict = 1,
        /// Explain logs.
        Explain = 2,
    }
}
/// ModelDeploymentMonitoringObjectiveConfig contains the pair of
/// deployed_model_id to ModelMonitoringObjectiveConfig.
// NOTE(review): prost-generated message; tag numbers are part of the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelDeploymentMonitoringObjectiveConfig {
    /// The DeployedModel ID of the objective config.
    #[prost(string, tag = "1")]
    pub deployed_model_id: ::prost::alloc::string::String,
    /// The objective config of for the modelmonitoring job of this deployed model.
    #[prost(message, optional, tag = "2")]
    pub objective_config: ::core::option::Option<ModelMonitoringObjectiveConfig>,
}
/// The config for scheduling monitoring job.
// NOTE(review): prost-generated message; tag numbers are part of the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelDeploymentMonitoringScheduleConfig {
    /// Required. The model monitoring job running interval. It will be rounded up to next
    /// full hour.
    #[prost(message, optional, tag = "1")]
    pub monitor_interval: ::core::option::Option<::prost_types::Duration>,
}
/// Statistics and anomalies generated by Model Monitoring.
// NOTE(review): prost-generated message; tag numbers are part of the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelMonitoringStatsAnomalies {
    /// Model Monitoring Objective those stats and anomalies belonging to.
    // Stored as i32; interpret via `ModelDeploymentMonitoringObjectiveType`.
    #[prost(enumeration = "ModelDeploymentMonitoringObjectiveType", tag = "1")]
    pub objective: i32,
    /// Deployed Model ID.
    #[prost(string, tag = "2")]
    pub deployed_model_id: ::prost::alloc::string::String,
    /// Number of anomalies within all stats.
    #[prost(int32, tag = "3")]
    pub anomaly_count: i32,
    /// A list of historical Stats and Anomalies generated for all Features.
    #[prost(message, repeated, tag = "4")]
    pub feature_stats:
        ::prost::alloc::vec::Vec<model_monitoring_stats_anomalies::FeatureHistoricStatsAnomalies>,
}
/// Nested message and enum types in `ModelMonitoringStatsAnomalies`.
pub mod model_monitoring_stats_anomalies {
    /// Historical Stats (and Anomalies) for a specific Feature.
    // NOTE(review): prost-generated nested message; tag 2 is absent — presumably
    // reserved/removed in the .proto; do not reuse it.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct FeatureHistoricStatsAnomalies {
        /// Display Name of the Feature.
        #[prost(string, tag = "1")]
        pub feature_display_name: ::prost::alloc::string::String,
        /// Threshold for anomaly detection.
        #[prost(message, optional, tag = "3")]
        pub threshold: ::core::option::Option<super::ThresholdConfig>,
        /// Stats calculated for the Training Dataset.
        #[prost(message, optional, tag = "4")]
        pub training_stats: ::core::option::Option<super::FeatureStatsAnomaly>,
        /// A list of historical stats generated by different time window's
        /// Prediction Dataset.
        #[prost(message, repeated, tag = "5")]
        pub prediction_stats: ::prost::alloc::vec::Vec<super::FeatureStatsAnomaly>,
    }
}
/// The Model Monitoring Objective types.
// Mirrors the proto enum; discriminant values are part of the wire format.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum ModelDeploymentMonitoringObjectiveType {
    /// Default value, should not be set.
    Unspecified = 0,
    /// Raw feature values' stats to detect skew between Training-Prediction
    /// datasets.
    RawFeatureSkew = 1,
    /// Raw feature values' stats to detect drift between Serving-Prediction
    /// datasets.
    RawFeatureDrift = 2,
    /// Feature attribution scores to detect skew between Training-Prediction
    /// datasets.
    FeatureAttributionSkew = 3,
    /// Feature attribution scores to detect skew between Prediction datasets
    /// collected within different time windows.
    FeatureAttributionDrift = 4,
}
/// Models are deployed into it, and afterwards Endpoint is called to obtain
/// predictions and explanations.
// NOTE(review): prost-generated message; the #[prost(tag = ...)] numbers define
// the protobuf wire format and must stay in sync with the .proto — do not renumber.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Endpoint {
    /// Output only. The resource name of the Endpoint.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The display name of the Endpoint.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// The description of the Endpoint.
    #[prost(string, tag = "3")]
    pub description: ::prost::alloc::string::String,
    /// Output only. The models deployed in this Endpoint.
    /// To add or remove DeployedModels use \[EndpointService.DeployModel][google.cloud.aiplatform.v1.EndpointService.DeployModel\] and
    /// \[EndpointService.UndeployModel][google.cloud.aiplatform.v1.EndpointService.UndeployModel\] respectively.
    #[prost(message, repeated, tag = "4")]
    pub deployed_models: ::prost::alloc::vec::Vec<DeployedModel>,
    /// A map from a DeployedModel's ID to the percentage of this Endpoint's
    /// traffic that should be forwarded to that DeployedModel.
    ///
    /// If a DeployedModel's ID is not listed in this map, then it receives no
    /// traffic.
    ///
    /// The traffic percentage values must add up to 100, or map must be empty if
    /// the Endpoint is to not accept any traffic at a moment.
    #[prost(map = "string, int32", tag = "5")]
    pub traffic_split: ::std::collections::HashMap<::prost::alloc::string::String, i32>,
    /// Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "6")]
    pub etag: ::prost::alloc::string::String,
    /// The labels with user-defined metadata to organize your Endpoints.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    #[prost(map = "string, string", tag = "7")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Output only. Timestamp when this Endpoint was created.
    #[prost(message, optional, tag = "8")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Endpoint was last updated.
    #[prost(message, optional, tag = "9")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Customer-managed encryption key spec for an Endpoint. If set, this
    /// Endpoint and all sub-resources of this Endpoint will be secured by
    /// this key.
    #[prost(message, optional, tag = "10")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
    /// The full name of the Google Compute Engine
    /// \[network\](<https://cloud.google.com//compute/docs/networks-and-firewalls#networks>)
    /// to which the Endpoint should be peered.
    ///
    /// Private services access must already be configured for the network. If left
    /// unspecified, the Endpoint is not peered with any network.
    ///
    /// \[Format\](<https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert>):
    /// `projects/{project}/global/networks/{network}`.
    /// Where `{project}` is a project number, as in `12345`, and `{network}` is
    /// network name.
    #[prost(string, tag = "13")]
    pub network: ::prost::alloc::string::String,
    /// Output only. Resource name of the Model Monitoring job associated with this Endpoint
    /// if monitoring is enabled by \[CreateModelDeploymentMonitoringJob][\].
    /// Format:
    /// `projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}`
    #[prost(string, tag = "14")]
    pub model_deployment_monitoring_job: ::prost::alloc::string::String,
}
/// A deployment of a Model. Endpoints contain one or more DeployedModels.
// NOTE(review): prost-generated message; the #[prost(tag = ...)] numbers define
// the protobuf wire format and must stay in sync with the .proto — do not renumber.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployedModel {
    /// Output only. The ID of the DeployedModel.
    #[prost(string, tag = "1")]
    pub id: ::prost::alloc::string::String,
    /// Required. The name of the Model that this is the deployment of. Note that the Model
    /// may be in a different location than the DeployedModel's Endpoint.
    #[prost(string, tag = "2")]
    pub model: ::prost::alloc::string::String,
    /// The display name of the DeployedModel. If not provided upon creation,
    /// the Model's display_name is used.
    #[prost(string, tag = "3")]
    pub display_name: ::prost::alloc::string::String,
    /// Output only. Timestamp when the DeployedModel was created.
    #[prost(message, optional, tag = "6")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Explanation configuration for this DeployedModel.
    ///
    /// When deploying a Model using \[EndpointService.DeployModel][google.cloud.aiplatform.v1.EndpointService.DeployModel\], this value
    /// overrides the value of \[Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec\]. All fields of
    /// \[explanation_spec][google.cloud.aiplatform.v1.DeployedModel.explanation_spec\] are optional in the request. If a field of
    /// \[explanation_spec][google.cloud.aiplatform.v1.DeployedModel.explanation_spec\] is not populated, the value of the same field of
    /// \[Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec\] is inherited. If the corresponding
    /// \[Model.explanation_spec][google.cloud.aiplatform.v1.Model.explanation_spec\] is not populated, all fields of the
    /// \[explanation_spec][google.cloud.aiplatform.v1.DeployedModel.explanation_spec\] will be used for the explanation configuration.
    #[prost(message, optional, tag = "9")]
    pub explanation_spec: ::core::option::Option<ExplanationSpec>,
    /// The service account that the DeployedModel's container runs as. Specify the
    /// email address of the service account. If this service account is not
    /// specified, the container runs as a service account that doesn't have access
    /// to the resource project.
    ///
    /// Users deploying the Model must have the `iam.serviceAccounts.actAs`
    /// permission on this service account.
    #[prost(string, tag = "11")]
    pub service_account: ::prost::alloc::string::String,
    /// For custom-trained Models and AutoML Tabular Models, the container of the
    /// DeployedModel instances will send `stderr` and `stdout` streams to
    /// Stackdriver Logging by default. Please note that the logs incur cost,
    /// which are subject to [Cloud Logging
    /// pricing](<https://cloud.google.com/stackdriver/pricing>).
    ///
    /// User can disable container logging by setting this flag to true.
    #[prost(bool, tag = "15")]
    pub disable_container_logging: bool,
    /// These logs are like standard server access logs, containing
    /// information like timestamp and latency for each prediction request.
    ///
    /// Note that Stackdriver logs may incur a cost, especially if your project
    /// receives prediction requests at a high queries per second rate (QPS).
    /// Estimate your costs before enabling this option.
    #[prost(bool, tag = "13")]
    pub enable_access_logging: bool,
    /// Output only. Provide paths for users to send predict/explain/health requests directly to
    /// the deployed model services running on Cloud via private services access.
    /// This field is populated if \[network][google.cloud.aiplatform.v1.Endpoint.network\] is configured.
    #[prost(message, optional, tag = "14")]
    pub private_endpoints: ::core::option::Option<PrivateEndpoints>,
    /// The prediction (for example, the machine) resources that the DeployedModel
    /// uses. The user is billed for the resources (at least their minimal amount)
    /// even if the DeployedModel receives no traffic.
    /// Not all Models support all resources types. See
    /// \[Model.supported_deployment_resources_types][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types\].
    // Proto `oneof`: at most one of tags 7 and 8 is set, modeled as an Option<enum>.
    #[prost(oneof = "deployed_model::PredictionResources", tags = "7, 8")]
    pub prediction_resources: ::core::option::Option<deployed_model::PredictionResources>,
}
/// Nested message and enum types in `DeployedModel`.
pub mod deployed_model {
    /// The prediction (for example, the machine) resources that the DeployedModel
    /// uses. The user is billed for the resources (at least their minimal amount)
    /// even if the DeployedModel receives no traffic.
    /// Not all Models support all resources types. See
    /// \[Model.supported_deployment_resources_types][google.cloud.aiplatform.v1.Model.supported_deployment_resources_types\].
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum PredictionResources {
        /// A description of resources that are dedicated to the DeployedModel, and
        /// that need a higher degree of manual configuration.
        #[prost(message, tag = "7")]
        DedicatedResources(super::DedicatedResources),
        /// A description of resources that to large degree are decided by Vertex
        /// AI, and require only a modest additional configuration.
        #[prost(message, tag = "8")]
        AutomaticResources(super::AutomaticResources),
    }
}
/// PrivateEndpoints is used to provide paths for users to send
/// requests via private services access.
// NOTE(review): prost-generated message; tag numbers are part of the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PrivateEndpoints {
    /// Output only. Http(s) path to send prediction requests.
    #[prost(string, tag = "1")]
    pub predict_http_uri: ::prost::alloc::string::String,
    /// Output only. Http(s) path to send explain requests.
    #[prost(string, tag = "2")]
    pub explain_http_uri: ::prost::alloc::string::String,
    /// Output only. Http(s) path to send health check requests.
    #[prost(string, tag = "3")]
    pub health_http_uri: ::prost::alloc::string::String,
}
/// Request message for \[EndpointService.CreateEndpoint][google.cloud.aiplatform.v1.EndpointService.CreateEndpoint\].
// NOTE(review): prost-generated message; tag numbers are part of the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateEndpointRequest {
    /// Required. The resource name of the Location to create the Endpoint in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Endpoint to create.
    #[prost(message, optional, tag = "2")]
    pub endpoint: ::core::option::Option<Endpoint>,
}
/// Runtime operation information for \[EndpointService.CreateEndpoint][google.cloud.aiplatform.v1.EndpointService.CreateEndpoint\].
// NOTE(review): prost-generated message; tag numbers are part of the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateEndpointOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[EndpointService.GetEndpoint][google.cloud.aiplatform.v1.EndpointService.GetEndpoint\]
// NOTE(review): prost-generated message; tag numbers are part of the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetEndpointRequest {
    /// Required. The name of the Endpoint resource.
    /// Format:
    /// `projects/{project}/locations/{location}/endpoints/{endpoint}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[EndpointService.ListEndpoints][google.cloud.aiplatform.v1.EndpointService.ListEndpoints\].
// NOTE(review): prost-generated message; tag numbers are part of the wire format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListEndpointsRequest {
    /// Required. The resource name of the Location from which to list the Endpoints.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Optional. An expression for filtering the results of the request. For field names
    /// both snake_case and camelCase are supported.
    ///
    ///   * `endpoint` supports = and !=. `endpoint` represents the Endpoint ID,
    ///     i.e. the last segment of the Endpoint's [resource name]\[google.cloud.aiplatform.v1.Endpoint.name\].
    ///   * `display_name` supports = and, !=
    ///   * `labels` supports general map functions that is:
    ///     * `labels.key=value` - key:value equality
    ///     * `labels.key:* or labels:key - key existence
    ///     * A key including a space must be quoted. `labels."a key"`.
    ///
    /// Some examples:
    ///   * `endpoint=1`
    ///   * `displayName="myDisplayName"`
    ///   * `labels.myKey="myValue"`
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// Optional. The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// Optional. The standard list page token.
    /// Typically obtained via
    /// \[ListEndpointsResponse.next_page_token][google.cloud.aiplatform.v1.ListEndpointsResponse.next_page_token\] of the previous
    /// \[EndpointService.ListEndpoints][google.cloud.aiplatform.v1.EndpointService.ListEndpoints\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Optional. Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// A comma-separated list of fields to order by, sorted in ascending order.
    /// Use "desc" after a field name for descending.
    /// Supported fields:
    ///   * `display_name`
    ///   * `create_time`
    ///   * `update_time`
    ///
    /// Example: `display_name, create_time desc`.
    #[prost(string, tag = "6")]
    pub order_by: ::prost::alloc::string::String,
}
/// Response message for \[EndpointService.ListEndpoints][google.cloud.aiplatform.v1.EndpointService.ListEndpoints\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListEndpointsResponse {
    /// List of Endpoints in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub endpoints: ::prost::alloc::vec::Vec<Endpoint>,
    /// A token to retrieve the next page of results.
    /// Pass to \[ListEndpointsRequest.page_token][google.cloud.aiplatform.v1.ListEndpointsRequest.page_token\] to obtain that page.
    // NOTE(review): by standard List-API convention an empty token means the last
    // page — confirm against the service documentation before relying on it.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[EndpointService.UpdateEndpoint][google.cloud.aiplatform.v1.EndpointService.UpdateEndpoint\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateEndpointRequest {
    /// Required. The Endpoint which replaces the resource on the server.
    #[prost(message, optional, tag = "1")]
    pub endpoint: ::core::option::Option<Endpoint>,
    /// Required. The update mask applies to the resource. See \[google.protobuf.FieldMask][google.protobuf.FieldMask\].
    // Only the fields named in this mask are updated; the rest of `endpoint` is ignored.
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for \[EndpointService.DeleteEndpoint][google.cloud.aiplatform.v1.EndpointService.DeleteEndpoint\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteEndpointRequest {
    /// Required. The name of the Endpoint resource to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/endpoints/{endpoint}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[EndpointService.DeployModel][google.cloud.aiplatform.v1.EndpointService.DeployModel\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployModelRequest {
    /// Required. The name of the Endpoint resource into which to deploy a Model.
    /// Format:
    /// `projects/{project}/locations/{location}/endpoints/{endpoint}`
    #[prost(string, tag = "1")]
    pub endpoint: ::prost::alloc::string::String,
    /// Required. The DeployedModel to be created within the Endpoint. Note that
    /// \[Endpoint.traffic_split][google.cloud.aiplatform.v1.Endpoint.traffic_split\] must be updated for the DeployedModel to start
    /// receiving traffic, either as part of this call, or via
    /// \[EndpointService.UpdateEndpoint][google.cloud.aiplatform.v1.EndpointService.UpdateEndpoint\].
    #[prost(message, optional, tag = "2")]
    pub deployed_model: ::core::option::Option<DeployedModel>,
    /// A map from a DeployedModel's ID to the percentage of this Endpoint's
    /// traffic that should be forwarded to that DeployedModel.
    ///
    /// If this field is non-empty, then the Endpoint's
    /// \[traffic_split][google.cloud.aiplatform.v1.Endpoint.traffic_split\] will be overwritten with it.
    /// To refer to the ID of the just being deployed Model, a "0" should be used,
    /// and the actual ID of the new DeployedModel will be filled in its place by
    /// this method. The traffic percentage values must add up to 100.
    ///
    /// If this field is empty, then the Endpoint's
    /// \[traffic_split][google.cloud.aiplatform.v1.Endpoint.traffic_split\] is not updated.
    #[prost(map = "string, int32", tag = "3")]
    pub traffic_split: ::std::collections::HashMap<::prost::alloc::string::String, i32>,
}
/// Response message for \[EndpointService.DeployModel][google.cloud.aiplatform.v1.EndpointService.DeployModel\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployModelResponse {
    /// The DeployedModel that had been deployed in the Endpoint.
    // Returned with its server-assigned ID filled in (the request may have used "0").
    #[prost(message, optional, tag = "1")]
    pub deployed_model: ::core::option::Option<DeployedModel>,
}
/// Runtime operation information for \[EndpointService.DeployModel][google.cloud.aiplatform.v1.EndpointService.DeployModel\].
// Carried inside the long-running `Operation` returned by `deploy_model`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployModelOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[EndpointService.UndeployModel][google.cloud.aiplatform.v1.EndpointService.UndeployModel\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UndeployModelRequest {
    /// Required. The name of the Endpoint resource from which to undeploy a Model.
    /// Format:
    /// `projects/{project}/locations/{location}/endpoints/{endpoint}`
    #[prost(string, tag = "1")]
    pub endpoint: ::prost::alloc::string::String,
    /// Required. The ID of the DeployedModel to be undeployed from the Endpoint.
    #[prost(string, tag = "2")]
    pub deployed_model_id: ::prost::alloc::string::String,
    /// If this field is provided, then the Endpoint's
    /// \[traffic_split][google.cloud.aiplatform.v1.Endpoint.traffic_split\] will be overwritten with it. If
    /// last DeployedModel is being undeployed from the Endpoint, the
    /// \[Endpoint.traffic_split\] will always end up empty when this call returns.
    /// A DeployedModel will be successfully undeployed only if it doesn't have
    /// any traffic assigned to it when this method executes, or if this field
    /// unassigns any traffic to it.
    #[prost(map = "string, int32", tag = "3")]
    pub traffic_split: ::std::collections::HashMap<::prost::alloc::string::String, i32>,
}
/// Response message for \[EndpointService.UndeployModel][google.cloud.aiplatform.v1.EndpointService.UndeployModel\].
// Intentionally empty: success/failure is conveyed by the long-running Operation itself.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UndeployModelResponse {}
/// Runtime operation information for \[EndpointService.UndeployModel][google.cloud.aiplatform.v1.EndpointService.UndeployModel\].
// Carried inside the long-running `Operation` returned by `undeploy_model`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UndeployModelOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
#[doc = r" Generated client implementations."]
pub mod endpoint_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    // tonic-build codegen: a thin typed wrapper over `tonic::client::Grpc` in which
    // every RPC method (a) waits for the underlying service to be ready, (b) builds
    // a ProstCodec and the static gRPC path, and (c) issues a unary call.
    #[doc = " A service for managing Vertex AI's Endpoints."]
    #[derive(Debug, Clone)]
    pub struct EndpointServiceClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl<T> EndpointServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps an already-connected transport (e.g. a tonic `Channel`).
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Same as `new`, but every request first passes through `interceptor`
        // (typically used to attach auth metadata).
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> EndpointServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            EndpointServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        // Long-running RPC: returns a `google.longrunning.Operation` to poll.
        #[doc = " Creates an Endpoint."]
        pub async fn create_endpoint(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateEndpointRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.EndpointService/CreateEndpoint",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets an Endpoint."]
        pub async fn get_endpoint(
            &mut self,
            request: impl tonic::IntoRequest<super::GetEndpointRequest>,
        ) -> Result<tonic::Response<super::Endpoint>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.EndpointService/GetEndpoint",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists Endpoints in a Location."]
        pub async fn list_endpoints(
            &mut self,
            request: impl tonic::IntoRequest<super::ListEndpointsRequest>,
        ) -> Result<tonic::Response<super::ListEndpointsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.EndpointService/ListEndpoints",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates an Endpoint."]
        pub async fn update_endpoint(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateEndpointRequest>,
        ) -> Result<tonic::Response<super::Endpoint>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.EndpointService/UpdateEndpoint",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        // Long-running RPC: returns a `google.longrunning.Operation` to poll.
        #[doc = " Deletes an Endpoint."]
        pub async fn delete_endpoint(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteEndpointRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.EndpointService/DeleteEndpoint",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        // Long-running RPC: returns a `google.longrunning.Operation` to poll.
        #[doc = " Deploys a Model into this Endpoint, creating a DeployedModel within it."]
        pub async fn deploy_model(
            &mut self,
            request: impl tonic::IntoRequest<super::DeployModelRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.EndpointService/DeployModel",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        // Long-running RPC: returns a `google.longrunning.Operation` to poll.
        #[doc = " Undeploys a Model from an Endpoint, removing a DeployedModel from it, and"]
        #[doc = " freeing all resources it's using."]
        pub async fn undeploy_model(
            &mut self,
            request: impl tonic::IntoRequest<super::UndeployModelRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.EndpointService/UndeployModel",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// Feature Metadata information that describes an attribute of an entity type.
/// For example, apple is an entity type, and color is a feature that describes
/// apple.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Feature {
    /// Immutable. Name of the Feature.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
    ///
    /// The last part feature is assigned by the client. The feature can be up to
    /// 64 characters long and can consist only of ASCII Latin letters A-Z and a-z,
    /// underscore(_), and ASCII digits 0-9 starting with a letter. The value will
    /// be unique given an entity type.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Description of the Feature.
    #[prost(string, tag = "2")]
    pub description: ::prost::alloc::string::String,
    /// Required. Immutable. Type of Feature value.
    // prost stores proto enums as a raw `i32`; interpret via `feature::ValueType`.
    #[prost(enumeration = "feature::ValueType", tag = "3")]
    pub value_type: i32,
    /// Output only. Timestamp when this EntityType was created.
    #[prost(message, optional, tag = "4")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this EntityType was most recently updated.
    #[prost(message, optional, tag = "5")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Optional. The labels with user-defined metadata to organize your Features.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information on and examples of labels.
    /// No more than 64 user labels can be associated with one Feature (System
    /// labels are excluded)."
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable.
    #[prost(map = "string, string", tag = "6")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Used to perform a consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "7")]
    pub etag: ::prost::alloc::string::String,
}
/// Nested message and enum types in `Feature`.
pub mod feature {
    /// An enum representing the value type of a feature.
    // Discriminants 5-8 are intentionally absent here; they are presumably
    // reserved/unused in the source proto — do not reuse them.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum ValueType {
        /// The value type is unspecified.
        Unspecified = 0,
        /// Used for Feature that is a boolean.
        Bool = 1,
        /// Used for Feature that is a list of boolean.
        BoolArray = 2,
        /// Used for Feature that is double.
        Double = 3,
        /// Used for Feature that is a list of double.
        DoubleArray = 4,
        /// Used for Feature that is INT64.
        Int64 = 9,
        /// Used for Feature that is a list of INT64.
        Int64Array = 10,
        /// Used for Feature that is string.
        String = 11,
        /// Used for Feature that is a list of String.
        StringArray = 12,
        /// Used for Feature that is bytes.
        Bytes = 13,
    }
}
/// An entity type is a type of object in a system that needs to be modeled and
/// have stored information about. For example, driver is an entity type, and
/// driver0 is an instance of an entity type driver.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EntityType {
    /// Immutable. Name of the EntityType.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
    ///
    /// The last part entity_type is assigned by the client. The entity_type can be
    /// up to 64 characters long and can consist only of ASCII Latin letters A-Z
    /// and a-z and underscore(_), and ASCII digits 0-9 starting with a letter. The
    /// value will be unique given a featurestore.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Optional. Description of the EntityType.
    #[prost(string, tag = "2")]
    pub description: ::prost::alloc::string::String,
    /// Output only. Timestamp when this EntityType was created.
    #[prost(message, optional, tag = "3")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this EntityType was most recently updated.
    #[prost(message, optional, tag = "4")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Optional. The labels with user-defined metadata to organize your EntityTypes.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information on and examples of labels.
    /// No more than 64 user labels can be associated with one EntityType (System
    /// labels are excluded)."
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable.
    // Field tag 5 is skipped in the source proto (reserved or removed upstream).
    #[prost(map = "string, string", tag = "6")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Optional. Used to perform a consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "7")]
    pub etag: ::prost::alloc::string::String,
}
/// An edge describing the relationship between an Artifact and an Execution in
/// a lineage graph.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Event {
    /// Required. The relative resource name of the Artifact in the Event.
    #[prost(string, tag = "1")]
    pub artifact: ::prost::alloc::string::String,
    /// Output only. The relative resource name of the Execution in the Event.
    #[prost(string, tag = "2")]
    pub execution: ::prost::alloc::string::String,
    /// Output only. Time the Event occurred.
    #[prost(message, optional, tag = "3")]
    pub event_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Required. The type of the Event.
    // `type` is a Rust keyword, hence the raw identifier `r#type` generated by prost.
    #[prost(enumeration = "event::Type", tag = "4")]
    pub r#type: i32,
    /// The labels with user-defined metadata to annotate Events.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one Event (System
    /// labels are excluded).
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable.
    #[prost(map = "string, string", tag = "5")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
}
/// Nested message and enum types in `Event`.
pub mod event {
    /// Describes whether an Event's Artifact is the Execution's input or output.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum Type {
        /// Unspecified whether input or output of the Execution.
        Unspecified = 0,
        /// An input of the Execution.
        Input = 1,
        /// An output of the Execution.
        Output = 2,
    }
}
/// Instance of a general execution.
// Field tags are non-contiguous (3-5, 7, 8 absent) — presumably reserved or
// removed in the source proto; do not renumber.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Execution {
    /// Output only. The resource name of the Execution.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// User provided display name of the Execution.
    /// May be up to 128 Unicode characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// The state of this Execution. This is a property of the Execution, and does
    /// not imply or capture any ongoing process. This property is managed by
    /// clients (such as Vertex Pipelines) and the system does not prescribe
    /// or check the validity of state transitions.
    // prost stores proto enums as a raw `i32`; interpret via `execution::State`.
    #[prost(enumeration = "execution::State", tag = "6")]
    pub state: i32,
    /// An eTag used to perform consistent read-modify-write updates. If not set, a
    /// blind "overwrite" update happens.
    #[prost(string, tag = "9")]
    pub etag: ::prost::alloc::string::String,
    /// The labels with user-defined metadata to organize your Executions.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one Execution (System
    /// labels are excluded).
    #[prost(map = "string, string", tag = "10")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Output only. Timestamp when this Execution was created.
    #[prost(message, optional, tag = "11")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Execution was last updated.
    #[prost(message, optional, tag = "12")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The title of the schema describing the metadata.
    ///
    /// Schema title and version is expected to be registered in earlier Create
    /// Schema calls. And both are used together as unique identifiers to identify
    /// schemas within the local metadata store.
    #[prost(string, tag = "13")]
    pub schema_title: ::prost::alloc::string::String,
    /// The version of the schema in `schema_title` to use.
    ///
    /// Schema title and version is expected to be registered in earlier Create
    /// Schema calls. And both are used together as unique identifiers to identify
    /// schemas within the local metadata store.
    #[prost(string, tag = "14")]
    pub schema_version: ::prost::alloc::string::String,
    /// Properties of the Execution.
    /// The size of this field should not exceed 200KB.
    #[prost(message, optional, tag = "15")]
    pub metadata: ::core::option::Option<::prost_types::Struct>,
    /// Description of the Execution
    #[prost(string, tag = "16")]
    pub description: ::prost::alloc::string::String,
}
/// Nested message and enum types in `Execution`.
pub mod execution {
    /// Describes the state of the Execution.
    // Client-managed: per the `Execution.state` field docs, the service does not
    // validate transitions between these states.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum State {
        /// Unspecified Execution state
        Unspecified = 0,
        /// The Execution is new
        New = 1,
        /// The Execution is running
        Running = 2,
        /// The Execution has finished running
        Complete = 3,
        /// The Execution has failed
        Failed = 4,
        /// The Execution completed through Cache hit.
        Cached = 5,
        /// The Execution was cancelled.
        Cancelled = 6,
    }
}
/// Matcher for Features of an EntityType by Feature ID.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct IdMatcher {
    /// Required. The following are accepted as `ids`:
    ///
    /// * A single-element list containing only `*`, which selects all Features
    /// in the target EntityType, or
    /// * A list containing only Feature IDs, which selects only Features with
    /// those IDs in the target EntityType.
    #[prost(string, repeated, tag = "1")]
    pub ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Selector for Features of an EntityType.
// Currently ID-based matching is the only selection mechanism exposed here.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FeatureSelector {
    /// Required. Matches Features based on ID.
    #[prost(message, optional, tag = "1")]
    pub id_matcher: ::core::option::Option<IdMatcher>,
}
/// Vertex Feature Store provides a centralized repository for organizing,
/// storing, and serving ML features. The Featurestore is a top-level container
/// for your features and their values.
// Field tags 2 and 9 are absent — presumably reserved/removed in the source
// proto; do not renumber.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Featurestore {
    /// Output only. Name of the Featurestore. Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Output only. Timestamp when this Featurestore was created.
    #[prost(message, optional, tag = "3")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Featurestore was last updated.
    #[prost(message, optional, tag = "4")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Optional. Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "5")]
    pub etag: ::prost::alloc::string::String,
    /// Optional. The labels with user-defined metadata to organize your Featurestore.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information on and examples of labels.
    /// No more than 64 user labels can be associated with one Featurestore(System
    /// labels are excluded)."
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable.
    #[prost(map = "string, string", tag = "6")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Required. Config for online serving resources.
    #[prost(message, optional, tag = "7")]
    pub online_serving_config: ::core::option::Option<featurestore::OnlineServingConfig>,
    /// Output only. State of the featurestore.
    // prost stores proto enums as a raw `i32`; interpret via `featurestore::State`.
    #[prost(enumeration = "featurestore::State", tag = "8")]
    pub state: i32,
    /// Optional. Customer-managed encryption key spec for data storage. If set, both of the
    /// online and offline data storage will be secured by this key.
    #[prost(message, optional, tag = "10")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
}
/// Nested message and enum types in `Featurestore`.
pub mod featurestore {
    /// OnlineServingConfig specifies the details for provisioning online serving
    /// resources.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct OnlineServingConfig {
        /// The number of nodes for each cluster. The number of nodes will not
        /// scale automatically but can be scaled manually by providing different
        /// values when updating.
        // Field tag 1 is absent — presumably reserved/removed in the source proto.
        #[prost(int32, tag = "2")]
        pub fixed_node_count: i32,
    }
    /// Possible states a Featurestore can have.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum State {
        /// Default value. This value is unused.
        Unspecified = 0,
        /// State when the Featurestore configuration is not being updated and the
        /// fields reflect the current configuration of the Featurestore. The
        /// Featurestore is usable in this state.
        Stable = 1,
        /// State when the Featurestore configuration is being updated and the fields
        /// reflect the updated configuration of the Featurestore, not the current
        /// one. For example, `online_serving_config.fixed_node_count` can take
        /// minutes to update. While the update is in progress, the Featurestore
        /// will be in the UPDATING state and the value of `fixed_node_count` will be
        /// the updated value. Until the update completes, the actual number of nodes
        /// can still be the original value of `fixed_node_count`. The Featurestore
        /// is still usable in this state.
        Updating = 2,
    }
}
/// A list of boolean values.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BoolArray {
    /// A list of bool values.
    #[prost(bool, repeated, tag = "1")]
    pub values: ::prost::alloc::vec::Vec<bool>,
}
/// A list of double values.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DoubleArray {
    /// A list of double values.
    #[prost(double, repeated, tag = "1")]
    pub values: ::prost::alloc::vec::Vec<f64>,
}
/// A list of int64 values.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Int64Array {
    /// A list of int64 values.
    #[prost(int64, repeated, tag = "1")]
    pub values: ::prost::alloc::vec::Vec<i64>,
}
/// A list of string values.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct StringArray {
    /// A list of string values.
    #[prost(string, repeated, tag = "1")]
    pub values: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Request message for \[FeaturestoreOnlineServingService.ReadFeatureValues][google.cloud.aiplatform.v1.FeaturestoreOnlineServingService.ReadFeatureValues\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ReadFeatureValuesRequest {
    /// Required. The resource name of the EntityType for the entity being read.
    /// Value format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}`.
    /// For example, for a machine learning model predicting user clicks on a
    /// website, an EntityType ID could be `user`.
    #[prost(string, tag = "1")]
    pub entity_type: ::prost::alloc::string::String,
    /// Required. ID for a specific entity. For example,
    /// for a machine learning model predicting user clicks on a website, an entity
    /// ID could be `user_123`.
    #[prost(string, tag = "2")]
    pub entity_id: ::prost::alloc::string::String,
    /// Required. Selector choosing Features of the target EntityType.
    #[prost(message, optional, tag = "3")]
    pub feature_selector: ::core::option::Option<FeatureSelector>,
}
/// Response message for \[FeaturestoreOnlineServingService.ReadFeatureValues][google.cloud.aiplatform.v1.FeaturestoreOnlineServingService.ReadFeatureValues\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ReadFeatureValuesResponse {
    /// Response header.
    #[prost(message, optional, tag = "1")]
    pub header: ::core::option::Option<read_feature_values_response::Header>,
    /// Entity view with Feature values. This may be the entity in the
    /// Featurestore if values for all Features were requested, or a projection
    /// of the entity in the Featurestore if values for only some Features were
    /// requested.
    #[prost(message, optional, tag = "2")]
    pub entity_view: ::core::option::Option<read_feature_values_response::EntityView>,
}
/// Nested message and enum types in `ReadFeatureValuesResponse`.
pub mod read_feature_values_response {
    /// Metadata for requested Features.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct FeatureDescriptor {
        /// Feature ID.
        #[prost(string, tag = "1")]
        pub id: ::prost::alloc::string::String,
    }
    /// Response header with metadata for the requested
    /// \[ReadFeatureValuesRequest.entity_type][google.cloud.aiplatform.v1.ReadFeatureValuesRequest.entity_type\] and Features.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Header {
        /// The resource name of the EntityType from the
        /// \[ReadFeatureValuesRequest][google.cloud.aiplatform.v1.ReadFeatureValuesRequest\]. Value format:
        /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}`.
        #[prost(string, tag = "1")]
        pub entity_type: ::prost::alloc::string::String,
        /// List of Feature metadata corresponding to each piece of
        /// \[ReadFeatureValuesResponse.data][\].
        // Positionally aligned with `EntityView.data`: descriptor i describes data i.
        #[prost(message, repeated, tag = "2")]
        pub feature_descriptors: ::prost::alloc::vec::Vec<FeatureDescriptor>,
    }
    /// Entity view with Feature values.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct EntityView {
        /// ID of the requested entity.
        #[prost(string, tag = "1")]
        pub entity_id: ::prost::alloc::string::String,
        /// Each piece of data holds the k
        /// requested values for one requested Feature. If no values
        /// for the requested Feature exist, the corresponding cell will be empty.
        /// This has the same size and is in the same order as the features from the
        /// header \[ReadFeatureValuesResponse.header][google.cloud.aiplatform.v1.ReadFeatureValuesResponse.header\].
        #[prost(message, repeated, tag = "2")]
        pub data: ::prost::alloc::vec::Vec<entity_view::Data>,
    }
    /// Nested message and enum types in `EntityView`.
    pub mod entity_view {
        /// Container to hold value(s), successive in time, for one Feature from the
        /// request.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct Data {
            // `None` here corresponds to the "empty cell" case documented on
            // `EntityView.data` (no values exist for the requested Feature).
            #[prost(oneof = "data::Data", tags = "1, 2")]
            pub data: ::core::option::Option<data::Data>,
        }
        /// Nested message and enum types in `Data`.
        pub mod data {
            /// Either a single value or a time-ordered list, per the request.
            #[derive(Clone, PartialEq, ::prost::Oneof)]
            pub enum Data {
                /// Feature value if a single value is requested.
                #[prost(message, tag = "1")]
                Value(super::super::super::FeatureValue),
                /// Feature values list if values, successive in time, are requested.
                /// If the requested number of values is greater than the number of
                /// existing Feature values, nonexistent values are omitted instead of
                /// being returned as empty.
                #[prost(message, tag = "2")]
                Values(super::super::super::FeatureValueList),
            }
        }
    }
}
/// Request message for
/// \[FeaturestoreOnlineServingService.StreamingFeatureValuesRead][\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct StreamingReadFeatureValuesRequest {
    /// Required. The resource name of the entities' type.
    /// Value format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}`.
    /// For example,
    /// for a machine learning model predicting user clicks on a website, an
    /// EntityType ID could be `user`.
    #[prost(string, tag = "1")]
    pub entity_type: ::prost::alloc::string::String,
    /// Required. IDs of entities to read Feature values of. The maximum number of IDs is
    /// 100. For example, for a machine learning model predicting user clicks on a
    /// website, an entity ID could be `user_123`.
    #[prost(string, repeated, tag = "2")]
    pub entity_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Required. Selector choosing Features of the target EntityType. Feature IDs will be
    /// deduplicated.
    #[prost(message, optional, tag = "3")]
    pub feature_selector: ::core::option::Option<FeatureSelector>,
}
/// Value for a feature.
/// NEXT ID: 15
// NOTE(review): "NEXT ID" is a proto-authoring marker (next free field tag),
// not something consumed at runtime.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FeatureValue {
    /// Metadata of feature value.
    #[prost(message, optional, tag = "14")]
    pub metadata: ::core::option::Option<feature_value::Metadata>,
    /// Value for the feature.
    #[prost(oneof = "feature_value::Value", tags = "1, 2, 5, 6, 7, 8, 11, 12, 13")]
    pub value: ::core::option::Option<feature_value::Value>,
}
/// Nested message and enum types in `FeatureValue`.
pub mod feature_value {
    /// Metadata of feature value.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Metadata {
        /// Feature generation timestamp. Typically, it is provided by user at
        /// feature ingestion time. If not, feature store
        /// will use the system timestamp when the data is ingested into feature
        /// store.
        #[prost(message, optional, tag = "1")]
        pub generate_time: ::core::option::Option<::prost_types::Timestamp>,
    }
    /// Value for the feature. Exactly one typed variant is set per value
    /// (protobuf oneof).
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Value {
        /// Bool type feature value.
        #[prost(bool, tag = "1")]
        BoolValue(bool),
        /// Double type feature value.
        #[prost(double, tag = "2")]
        DoubleValue(f64),
        /// Int64 feature value.
        #[prost(int64, tag = "5")]
        Int64Value(i64),
        /// String feature value.
        #[prost(string, tag = "6")]
        StringValue(::prost::alloc::string::String),
        /// A list of bool type feature value.
        #[prost(message, tag = "7")]
        BoolArrayValue(super::BoolArray),
        /// A list of double type feature value.
        #[prost(message, tag = "8")]
        DoubleArrayValue(super::DoubleArray),
        /// A list of int64 type feature value.
        #[prost(message, tag = "11")]
        Int64ArrayValue(super::Int64Array),
        /// A list of string type feature value.
        #[prost(message, tag = "12")]
        StringArrayValue(super::StringArray),
        /// Bytes feature value.
        #[prost(bytes, tag = "13")]
        BytesValue(::prost::alloc::vec::Vec<u8>),
    }
}
/// Container for list of values.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FeatureValueList {
    /// A list of feature values. All of them should be the same data type.
    #[prost(message, repeated, tag = "1")]
    pub values: ::prost::alloc::vec::Vec<FeatureValue>,
}
#[doc = r" Generated client implementations."]
// NOTE(review): tonic-generated gRPC client — the generic bounds and the
// ready/codec/path call sequence follow tonic's codegen template; do not
// hand-edit, regenerate instead.
pub mod featurestore_online_serving_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    #[doc = " A service for serving online feature values."]
    #[derive(Debug, Clone)]
    pub struct FeaturestoreOnlineServingServiceClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl<T> FeaturestoreOnlineServingServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wrap an arbitrary gRPC transport (e.g. a tonic Channel) in this
        // typed client.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Build a client whose every request passes through `interceptor`
        // (commonly used to attach auth metadata).
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> FeaturestoreOnlineServingServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            FeaturestoreOnlineServingServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Reads Feature values of a specific entity of an EntityType. For reading"]
        #[doc = " feature values of multiple entities of an EntityType, please use"]
        #[doc = " StreamingReadFeatureValues."]
        pub async fn read_feature_values(
            &mut self,
            request: impl tonic::IntoRequest<super::ReadFeatureValuesRequest>,
        ) -> Result<tonic::Response<super::ReadFeatureValuesResponse>, tonic::Status> {
            // Wait until the underlying transport can accept a request; map
            // transport-readiness failures to a gRPC `Unknown` status.
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreOnlineServingService/ReadFeatureValues",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Reads Feature values for multiple entities. Depending on their size, data"]
        #[doc = " for different entities may be broken"]
        #[doc = " up across multiple responses."]
        pub async fn streaming_read_feature_values(
            &mut self,
            request: impl tonic::IntoRequest<super::StreamingReadFeatureValuesRequest>,
        ) -> Result<
            tonic::Response<tonic::codec::Streaming<super::ReadFeatureValuesResponse>>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            // Server-streaming RPC: one request in, a stream of responses out.
            let path = http :: uri :: PathAndQuery :: from_static ("/google.cloud.aiplatform.v1.FeaturestoreOnlineServingService/StreamingReadFeatureValues") ;
            self.inner.server_streaming(request.into_request(), path, codec).await
        }
    }
}
/// Request message for \[FeaturestoreService.CreateFeaturestore][google.cloud.aiplatform.v1.FeaturestoreService.CreateFeaturestore\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateFeaturestoreRequest {
    /// Required. The resource name of the Location to create Featurestores.
    /// Format:
    /// `projects/{project}/locations/{location}'`
    // NOTE(review): the trailing `'` above comes from the upstream proto
    // comment and looks like a typo there; the runtime format is unaffected.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Featurestore to create.
    #[prost(message, optional, tag = "2")]
    pub featurestore: ::core::option::Option<Featurestore>,
    /// Required. The ID to use for this Featurestore, which will become the final component
    /// of the Featurestore's resource name.
    ///
    /// This value may be up to 60 characters, and valid characters are
    /// `\[a-z0-9_\]`. The first character cannot be a number.
    ///
    /// The value must be unique within the project and location.
    #[prost(string, tag = "3")]
    pub featurestore_id: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.GetFeaturestore][google.cloud.aiplatform.v1.FeaturestoreService.GetFeaturestore\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetFeaturestoreRequest {
    /// Required. The name of the Featurestore resource.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1.FeaturestoreService.ListFeaturestores\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListFeaturestoresRequest {
    /// Required. The resource name of the Location to list Featurestores.
    /// Format:
    /// `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Lists the featurestores that match the filter expression. The following
    /// fields are supported:
    ///
    /// * `create_time`: Supports `=`, `!=`, `<`, `>`, `<=`, and `>=` comparisons.
    /// Values must be
    ///   in RFC 3339 format.
    /// * `update_time`: Supports `=`, `!=`, `<`, `>`, `<=`, and `>=` comparisons.
    /// Values must be
    ///   in RFC 3339 format.
    /// * `online_serving_config.fixed_node_count`: Supports `=`, `!=`, `<`, `>`,
    /// `<=`, and `>=` comparisons.
    /// * `labels`: Supports key-value equality and key presence.
    ///
    /// Examples:
    ///
    /// * `create_time > "2020-01-01" OR update_time > "2020-01-01"`
    ///    Featurestores created or updated after 2020-01-01.
    /// * `labels.env = "prod"`
    ///    Featurestores with label "env" set to "prod".
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The maximum number of Featurestores to return. The service may return fewer
    /// than this value. If unspecified, at most 100 Featurestores will be
    /// returned. The maximum value is 100; any value greater than 100 will be
    /// coerced to 100.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1.FeaturestoreService.ListFeaturestores\] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1.FeaturestoreService.ListFeaturestores\] must
    /// match the call that provided the page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// A comma-separated list of fields to order by, sorted in ascending order.
    /// Use "desc" after a field name for descending.
    /// Supported Fields:
    ///
    /// * `create_time`
    /// * `update_time`
    /// * `online_serving_config.fixed_node_count`
    #[prost(string, tag = "5")]
    pub order_by: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "6")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[FeaturestoreService.ListFeaturestores][google.cloud.aiplatform.v1.FeaturestoreService.ListFeaturestores\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListFeaturestoresResponse {
    /// The Featurestores matching the request.
    #[prost(message, repeated, tag = "1")]
    pub featurestores: ::prost::alloc::vec::Vec<Featurestore>,
    /// A token, which can be sent as \[ListFeaturestoresRequest.page_token][google.cloud.aiplatform.v1.ListFeaturestoresRequest.page_token\] to
    /// retrieve the next page.
    /// If this field is omitted, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.UpdateFeaturestore][google.cloud.aiplatform.v1.FeaturestoreService.UpdateFeaturestore\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateFeaturestoreRequest {
    /// Required. The Featurestore's `name` field is used to identify the Featurestore to be
    /// updated.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}`
    #[prost(message, optional, tag = "1")]
    pub featurestore: ::core::option::Option<Featurestore>,
    /// Field mask is used to specify the fields to be overwritten in the
    /// Featurestore resource by the update.
    /// The fields specified in the update_mask are relative to the resource, not
    /// the full request. A field will be overwritten if it is in the mask. If the
    /// user does not provide a mask then only the non-empty fields present in the
    /// request will be overwritten. Set the update_mask to `*` to override all
    /// fields.
    ///
    /// Updatable fields:
    ///
    /// * `labels`
    /// * `online_serving_config.fixed_node_count`
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for \[FeaturestoreService.DeleteFeaturestore][google.cloud.aiplatform.v1.FeaturestoreService.DeleteFeaturestore\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteFeaturestoreRequest {
    /// Required. The name of the Featurestore to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// If set to true, any EntityTypes and Features for this Featurestore will
    /// also be deleted. (Otherwise, the request will only work if the Featurestore
    /// has no EntityTypes.)
    #[prost(bool, tag = "2")]
    pub force: bool,
}
/// Request message for \[FeaturestoreService.ImportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ImportFeatureValues\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportFeatureValuesRequest {
    /// Required. The resource name of the EntityType grouping the Features for which values
    /// are being imported. Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}`
    #[prost(string, tag = "1")]
    pub entity_type: ::prost::alloc::string::String,
    /// Source column that holds entity IDs. If not provided, entity IDs are
    /// extracted from the column named `entity_id`.
    #[prost(string, tag = "5")]
    pub entity_id_field: ::prost::alloc::string::String,
    /// Required. Specifications defining which Feature values to import from the entity. The
    /// request fails if no feature_specs are provided, and having multiple
    /// feature_specs for one Feature is not allowed.
    #[prost(message, repeated, tag = "8")]
    pub feature_specs: ::prost::alloc::vec::Vec<import_feature_values_request::FeatureSpec>,
    /// If set, data will not be imported for online serving. This
    /// is typically used for backfilling, where Feature generation timestamps are
    /// not in the timestamp range needed for online serving.
    #[prost(bool, tag = "9")]
    pub disable_online_serving: bool,
    /// Specifies the number of workers that are used to write data to the
    /// Featurestore. Consider the online serving capacity that you require to
    /// achieve the desired import throughput without interfering with online
    /// serving. The value must be positive, and less than or equal to 100.
    /// If not set, defaults to using 1 worker. The low count ensures minimal
    /// impact on online serving performance.
    #[prost(int32, tag = "11")]
    pub worker_count: i32,
    /// Details about the source data, including the location of the storage and
    /// the format.
    #[prost(oneof = "import_feature_values_request::Source", tags = "2, 3, 4")]
    pub source: ::core::option::Option<import_feature_values_request::Source>,
    /// Source of Feature timestamp for all Feature values of each entity.
    /// Timestamps must be millisecond-aligned.
    #[prost(oneof = "import_feature_values_request::FeatureTimeSource", tags = "6, 7")]
    pub feature_time_source:
        ::core::option::Option<import_feature_values_request::FeatureTimeSource>,
}
/// Nested message and enum types in `ImportFeatureValuesRequest`.
pub mod import_feature_values_request {
    /// Defines the Feature value(s) to import.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct FeatureSpec {
        /// Required. ID of the Feature to import values of. This Feature must exist in the
        /// target EntityType, or the request will fail.
        #[prost(string, tag = "1")]
        pub id: ::prost::alloc::string::String,
        /// Source column to get the Feature values from. If not set, uses the column
        /// with the same name as the Feature ID.
        #[prost(string, tag = "2")]
        pub source_field: ::prost::alloc::string::String,
    }
    /// Details about the source data, including the location of the storage and
    /// the format. Exactly one source kind is set (protobuf oneof).
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Source {
        /// Import from Avro files.
        #[prost(message, tag = "2")]
        AvroSource(super::AvroSource),
        /// Import from a BigQuery table.
        #[prost(message, tag = "3")]
        BigquerySource(super::BigQuerySource),
        /// Import from CSV files.
        #[prost(message, tag = "4")]
        CsvSource(super::CsvSource),
    }
    /// Source of Feature timestamp for all Feature values of each entity.
    /// Timestamps must be millisecond-aligned.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum FeatureTimeSource {
        /// Source column that holds the Feature timestamp for all Feature
        /// values in each entity.
        #[prost(string, tag = "6")]
        FeatureTimeField(::prost::alloc::string::String),
        /// Single Feature timestamp for all entities being imported. The
        /// timestamp must not have higher than millisecond precision.
        #[prost(message, tag = "7")]
        FeatureTime(::prost_types::Timestamp),
    }
}
/// Response message for \[FeaturestoreService.ImportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ImportFeatureValues\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportFeatureValuesResponse {
    /// Number of entities that have been imported by the operation.
    #[prost(int64, tag = "1")]
    pub imported_entity_count: i64,
    /// Number of Feature values that have been imported by the operation.
    #[prost(int64, tag = "2")]
    pub imported_feature_value_count: i64,
    /// The number of rows in input source that weren't imported due to either
    /// * Not having any featureValues.
    /// * Having a null entityId.
    /// * Having a null timestamp.
    /// * Not being parsable (applicable for CSV sources).
    #[prost(int64, tag = "6")]
    pub invalid_row_count: i64,
}
/// Request message for \[FeaturestoreService.BatchReadFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.BatchReadFeatureValues\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchReadFeatureValuesRequest {
    /// Required. The resource name of the Featurestore from which to query Feature values.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}`
    #[prost(string, tag = "1")]
    pub featurestore: ::prost::alloc::string::String,
    /// Required. Specifies output location and format.
    #[prost(message, optional, tag = "4")]
    pub destination: ::core::option::Option<FeatureValueDestination>,
    /// When not empty, the specified fields in the *_read_instances source will be
    /// joined as-is in the output, in addition to those fields from the
    /// Featurestore Entity.
    ///
    /// For BigQuery source, the type of the pass-through values will be
    /// automatically inferred. For CSV source, the pass-through values will be
    /// passed as opaque bytes.
    #[prost(message, repeated, tag = "8")]
    pub pass_through_fields:
        ::prost::alloc::vec::Vec<batch_read_feature_values_request::PassThroughField>,
    /// Required. Specifies EntityType grouping Features to read values of and settings.
    /// Each EntityType referenced in
    /// \[BatchReadFeatureValuesRequest.entity_type_specs\] must have a column
    /// specifying entity IDs in the EntityType in
    /// \[BatchReadFeatureValuesRequest.request][\] .
    // NOTE(review): the dangling `.request` link above is carried over from the
    // upstream proto comment; it presumably refers to the read-instance source
    // (the `read_option` oneof below).
    #[prost(message, repeated, tag = "7")]
    pub entity_type_specs:
        ::prost::alloc::vec::Vec<batch_read_feature_values_request::EntityTypeSpec>,
    /// Source of the read instances (CSV or BigQuery) — see `ReadOption`.
    #[prost(oneof = "batch_read_feature_values_request::ReadOption", tags = "3, 5")]
    pub read_option: ::core::option::Option<batch_read_feature_values_request::ReadOption>,
}
/// Nested message and enum types in `BatchReadFeatureValuesRequest`.
pub mod batch_read_feature_values_request {
    /// Describe pass-through fields in read_instance source.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct PassThroughField {
        /// Required. The name of the field in the CSV header or the name of the column in
        /// BigQuery table. The naming restriction is the same as \[Feature.name][google.cloud.aiplatform.v1.Feature.name\].
        #[prost(string, tag = "1")]
        pub field_name: ::prost::alloc::string::String,
    }
    /// Selects Features of an EntityType to read values of and specifies read
    /// settings.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct EntityTypeSpec {
        /// Required. ID of the EntityType to select Features. The EntityType id is the
        /// \[entity_type_id][google.cloud.aiplatform.v1.CreateEntityTypeRequest.entity_type_id\] specified
        /// during EntityType creation.
        #[prost(string, tag = "1")]
        pub entity_type_id: ::prost::alloc::string::String,
        /// Required. Selectors choosing which Feature values to read from the EntityType.
        #[prost(message, optional, tag = "2")]
        pub feature_selector: ::core::option::Option<super::FeatureSelector>,
        /// Per-Feature settings for the batch read.
        #[prost(message, repeated, tag = "3")]
        pub settings: ::prost::alloc::vec::Vec<super::DestinationFeatureSetting>,
    }
    /// Where the read instances come from (protobuf oneof).
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum ReadOption {
        /// Each read instance consists of exactly one read timestamp and one or more
        /// entity IDs identifying entities of the corresponding EntityTypes whose
        /// Features are requested.
        ///
        /// Each output instance contains Feature values of requested entities
        /// concatenated together as of the read time.
        ///
        /// An example read instance may be `foo_entity_id, bar_entity_id,
        /// 2020-01-01T10:00:00.123Z`.
        ///
        /// An example output instance may be `foo_entity_id, bar_entity_id,
        /// 2020-01-01T10:00:00.123Z, foo_entity_feature1_value,
        /// bar_entity_feature2_value`.
        ///
        /// Timestamp in each read instance must be millisecond-aligned.
        ///
        /// `csv_read_instances` are read instances stored in a plain-text CSV file.
        /// The header should be:
        /// \[ENTITY_TYPE_ID1\], \[ENTITY_TYPE_ID2\], ..., timestamp
        ///
        /// The columns can be in any order.
        ///
        /// Values in the timestamp column must use the RFC 3339 format, e.g.
        /// `2012-07-30T10:43:17.123Z`.
        #[prost(message, tag = "3")]
        CsvReadInstances(super::CsvSource),
        /// Similar to csv_read_instances, but from BigQuery source.
        #[prost(message, tag = "5")]
        BigqueryReadInstances(super::BigQuerySource),
    }
}
/// Request message for \[FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ExportFeatureValues\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportFeatureValuesRequest {
    /// Required. The resource name of the EntityType from which to export Feature values.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
    #[prost(string, tag = "1")]
    pub entity_type: ::prost::alloc::string::String,
    /// Required. Specifies destination location and format.
    #[prost(message, optional, tag = "4")]
    pub destination: ::core::option::Option<FeatureValueDestination>,
    /// Required. Selects Features to export values of.
    #[prost(message, optional, tag = "5")]
    pub feature_selector: ::core::option::Option<FeatureSelector>,
    /// Per-Feature export settings.
    #[prost(message, repeated, tag = "6")]
    pub settings: ::prost::alloc::vec::Vec<DestinationFeatureSetting>,
    /// Export mode (currently only snapshot export) — see `Mode`.
    #[prost(oneof = "export_feature_values_request::Mode", tags = "3")]
    pub mode: ::core::option::Option<export_feature_values_request::Mode>,
}
/// Nested message and enum types in `ExportFeatureValuesRequest`.
pub mod export_feature_values_request {
    /// Describes exporting Feature values as of the snapshot timestamp.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct SnapshotExport {
        /// Exports Feature values as of this timestamp. If not set,
        /// retrieve values as of now. Timestamp, if present, must not have higher
        /// than millisecond precision.
        #[prost(message, optional, tag = "1")]
        pub snapshot_time: ::core::option::Option<::prost_types::Timestamp>,
    }
    /// Export mode (protobuf oneof with a single variant today).
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Mode {
        /// Exports Feature values of all entities of the EntityType as of a snapshot
        /// time.
        #[prost(message, tag = "3")]
        SnapshotExport(SnapshotExport),
    }
}
/// Per-Feature destination override used by export and batch-read requests.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DestinationFeatureSetting {
    /// Required. The ID of the Feature to apply the setting to.
    #[prost(string, tag = "1")]
    pub feature_id: ::prost::alloc::string::String,
    /// Specify the field name in the export destination. If not specified,
    /// Feature ID is used.
    #[prost(string, tag = "2")]
    pub destination_field: ::prost::alloc::string::String,
}
/// A destination location for Feature values and format.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FeatureValueDestination {
    /// Exactly one destination kind is set — see `Destination`.
    #[prost(oneof = "feature_value_destination::Destination", tags = "1, 2, 3")]
    pub destination: ::core::option::Option<feature_value_destination::Destination>,
}
/// Nested message and enum types in `FeatureValueDestination`.
pub mod feature_value_destination {
    /// Output destination and format (protobuf oneof).
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Destination {
        /// Output in BigQuery format.
        /// \[BigQueryDestination.output_uri][google.cloud.aiplatform.v1.BigQueryDestination.output_uri\] in
        /// \[FeatureValueDestination.bigquery_destination][google.cloud.aiplatform.v1.FeatureValueDestination.bigquery_destination\] must refer to a table.
        #[prost(message, tag = "1")]
        BigqueryDestination(super::BigQueryDestination),
        /// Output in TFRecord format.
        ///
        /// Below are the mapping from Feature value type
        /// in Featurestore to Feature value type in TFRecord:
        ///
        ///     Value type in Featurestore                 | Value type in TFRecord
        ///     DOUBLE, DOUBLE_ARRAY                       | FLOAT_LIST
        ///     INT64, INT64_ARRAY                         | INT64_LIST
        ///     STRING, STRING_ARRAY, BYTES                | BYTES_LIST
        ///     true -> byte_string("true"), false -> byte_string("false")
        ///     BOOL, BOOL_ARRAY (true, false)             | BYTES_LIST
        #[prost(message, tag = "2")]
        TfrecordDestination(super::TfRecordDestination),
        /// Output in CSV format. Array Feature value types are not allowed in CSV
        /// format.
        #[prost(message, tag = "3")]
        CsvDestination(super::CsvDestination),
    }
}
/// Response message for \[FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ExportFeatureValues\].
// Intentionally empty: the RPC reports completion via a long-running operation.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportFeatureValuesResponse {}
/// Response message for \[FeaturestoreService.BatchReadFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.BatchReadFeatureValues\].
// Intentionally empty: results are written to the requested destination.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchReadFeatureValuesResponse {}
/// Request message for \[FeaturestoreService.CreateEntityType][google.cloud.aiplatform.v1.FeaturestoreService.CreateEntityType\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateEntityTypeRequest {
    /// Required. The resource name of the Featurestore to create EntityTypes.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The EntityType to create.
    #[prost(message, optional, tag = "2")]
    pub entity_type: ::core::option::Option<EntityType>,
    /// Required. The ID to use for the EntityType, which will become the final component of
    /// the EntityType's resource name.
    ///
    /// This value may be up to 60 characters, and valid characters are
    /// `\[a-z0-9_\]`. The first character cannot be a number.
    ///
    /// The value must be unique within a featurestore.
    #[prost(string, tag = "3")]
    pub entity_type_id: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.GetEntityType][google.cloud.aiplatform.v1.FeaturestoreService.GetEntityType\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetEntityTypeRequest {
    /// Required. The name of the EntityType resource.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1.FeaturestoreService.ListEntityTypes\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListEntityTypesRequest {
    /// Required. The resource name of the Featurestore to list EntityTypes.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Lists the EntityTypes that match the filter expression. The following
    /// filters are supported:
    ///
    /// * `create_time`: Supports `=`, `!=`, `<`, `>`, `>=`, and `<=` comparisons.
    /// Values must be in RFC 3339 format.
    /// * `update_time`: Supports `=`, `!=`, `<`, `>`, `>=`, and `<=` comparisons.
    /// Values must be in RFC 3339 format.
    /// * `labels`: Supports key-value equality as well as key presence.
    ///
    /// Examples:
    ///
    /// * `create_time > \"2020-01-31T15:30:00.000000Z\" OR
    ///      update_time > \"2020-01-31T15:30:00.000000Z\"` --> EntityTypes created
    ///      or updated after 2020-01-31T15:30:00.000000Z.
    /// * `labels.active = yes AND labels.env = prod` --> EntityTypes having both
    ///     (active: yes) and (env: prod) labels.
    /// * `labels.env: *` --> Any EntityType which has a label with 'env' as the
    ///   key.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The maximum number of EntityTypes to return. The service may return fewer
    /// than this value. If unspecified, at most 1000 EntityTypes will be returned.
    /// The maximum value is 1000; any value greater than 1000 will be coerced to
    /// 1000.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1.FeaturestoreService.ListEntityTypes\] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1.FeaturestoreService.ListEntityTypes\] must
    /// match the call that provided the page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// A comma-separated list of fields to order by, sorted in ascending order.
    /// Use "desc" after a field name for descending.
    ///
    /// Supported fields:
    ///
    /// * `entity_type_id`
    /// * `create_time`
    /// * `update_time`
    #[prost(string, tag = "5")]
    pub order_by: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "6")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[FeaturestoreService.ListEntityTypes][google.cloud.aiplatform.v1.FeaturestoreService.ListEntityTypes\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListEntityTypesResponse {
    /// The EntityTypes matching the request.
    #[prost(message, repeated, tag = "1")]
    pub entity_types: ::prost::alloc::vec::Vec<EntityType>,
    /// A token, which can be sent as \[ListEntityTypesRequest.page_token][google.cloud.aiplatform.v1.ListEntityTypesRequest.page_token\] to
    /// retrieve the next page.
    /// If this field is omitted, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.UpdateEntityType][google.cloud.aiplatform.v1.FeaturestoreService.UpdateEntityType\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateEntityTypeRequest {
    /// Required. The EntityType's `name` field is used to identify the EntityType to be
    /// updated.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
    #[prost(message, optional, tag = "1")]
    pub entity_type: ::core::option::Option<EntityType>,
    /// Field mask is used to specify the fields to be overwritten in the
    /// EntityType resource by the update.
    /// The fields specified in the update_mask are relative to the resource, not
    /// the full request. A field will be overwritten if it is in the mask. If the
    /// user does not provide a mask then only the non-empty fields present in the
    /// request will be overwritten. Set the update_mask to `*` to override all
    /// fields.
    ///
    /// Updatable fields:
    ///
    /// * `description`
    /// * `labels`
    /// * `monitoring_config.snapshot_analysis.disabled`
    /// * `monitoring_config.snapshot_analysis.monitoring_interval`
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for \[FeaturestoreService.DeleteEntityType][\].
// NOTE(review): the generated comment previously said `DeleteEntityTypes`
// (plural); the message deletes a single EntityType.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteEntityTypeRequest {
    /// Required. The name of the EntityType to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// If set to true, any Features for this EntityType will also be deleted.
    /// (Otherwise, the request will only work if the EntityType has no Features.)
    #[prost(bool, tag = "2")]
    pub force: bool,
}
/// Request message for \[FeaturestoreService.CreateFeature][google.cloud.aiplatform.v1.FeaturestoreService.CreateFeature\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateFeatureRequest {
    /// Required. The resource name of the EntityType to create a Feature.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Feature to create.
    #[prost(message, optional, tag = "2")]
    pub feature: ::core::option::Option<Feature>,
    /// Required. The ID to use for the Feature, which will become the final component of
    /// the Feature's resource name.
    ///
    /// This value may be up to 60 characters, and valid characters are
    /// `\[a-z0-9_\]`. The first character cannot be a number.
    ///
    /// The value must be unique within an EntityType.
    #[prost(string, tag = "3")]
    pub feature_id: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1.FeaturestoreService.BatchCreateFeatures\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchCreateFeaturesRequest {
    /// Required. The resource name of the EntityType to create the batch of Features under.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The request message specifying the Features to create. All Features must be
    /// created under the same parent EntityType. The `parent` field in each child
    /// request message can be omitted. If `parent` is set in a child request, then
    /// the value must match the `parent` value in this request message.
    #[prost(message, repeated, tag = "2")]
    pub requests: ::prost::alloc::vec::Vec<CreateFeatureRequest>,
}
/// Response message for \[FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1.FeaturestoreService.BatchCreateFeatures\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchCreateFeaturesResponse {
    /// The Features created.
    #[prost(message, repeated, tag = "1")]
    pub features: ::prost::alloc::vec::Vec<Feature>,
}
/// Request message for \[FeaturestoreService.GetFeature][google.cloud.aiplatform.v1.FeaturestoreService.GetFeature\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetFeatureRequest {
    /// Required. The name of the Feature resource.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
    // NOTE(review): the generated comment originally omitted the trailing
    // `/features/{feature}` segment; the full Feature resource name matches the
    // formats documented on UpdateFeatureRequest and DeleteFeatureRequest below.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1.FeaturestoreService.ListFeatures\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListFeaturesRequest {
    /// Required. The resource name of the EntityType whose Features to list.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
    // NOTE(review): original doc said "Location", but the format line shows an
    // EntityType resource name — the parent here is an EntityType.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Lists the Features that match the filter expression. The following
    /// filters are supported:
    ///
    /// * `value_type`: Supports = and != comparisons.
    /// * `create_time`: Supports =, !=, <, >, >=, and <= comparisons. Values must
    /// be in RFC 3339 format.
    /// * `update_time`: Supports =, !=, <, >, >=, and <= comparisons. Values must
    /// be in RFC 3339 format.
    /// * `labels`: Supports key-value equality as well as key presence.
    ///
    /// Examples:
    ///
    /// * `value_type = DOUBLE` --> Features whose type is DOUBLE.
    /// * `create_time > \"2020-01-31T15:30:00.000000Z\" OR
    ///      update_time > \"2020-01-31T15:30:00.000000Z\"` --> EntityTypes created
    ///      or updated after 2020-01-31T15:30:00.000000Z.
    /// * `labels.active = yes AND labels.env = prod` --> Features having both
    ///     (active: yes) and (env: prod) labels.
    /// * `labels.env: *` --> Any Feature which has a label with 'env' as the
    ///   key.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The maximum number of Features to return. The service may return fewer
    /// than this value. If unspecified, at most 1000 Features will be returned.
    /// The maximum value is 1000; any value greater than 1000 will be coerced to
    /// 1000.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1.FeaturestoreService.ListFeatures\] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1.FeaturestoreService.ListFeatures\] must
    /// match the call that provided the page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// A comma-separated list of fields to order by, sorted in ascending order.
    /// Use "desc" after a field name for descending.
    /// Supported fields:
    ///
    ///   * `feature_id`
    ///   * `value_type`
    ///   * `create_time`
    ///   * `update_time`
    #[prost(string, tag = "5")]
    pub order_by: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "6")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// If set, return the most recent \[ListFeaturesRequest.latest_stats_count][google.cloud.aiplatform.v1.ListFeaturesRequest.latest_stats_count\]
    /// of stats for each Feature in response. Valid value is [0, 10]. If number of
    /// stats exists < \[ListFeaturesRequest.latest_stats_count][google.cloud.aiplatform.v1.ListFeaturesRequest.latest_stats_count\], return all
    /// existing stats.
    #[prost(int32, tag = "7")]
    pub latest_stats_count: i32,
}
/// Response message for \[FeaturestoreService.ListFeatures][google.cloud.aiplatform.v1.FeaturestoreService.ListFeatures\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListFeaturesResponse {
    /// The Features matching the request.
    #[prost(message, repeated, tag = "1")]
    pub features: ::prost::alloc::vec::Vec<Feature>,
    /// A token, which can be sent as \[ListFeaturesRequest.page_token][google.cloud.aiplatform.v1.ListFeaturesRequest.page_token\] to
    /// retrieve the next page.
    /// If this field is omitted, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1.FeaturestoreService.SearchFeatures\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchFeaturesRequest {
    /// Required. The resource name of the Location to search Features.
    /// Format:
    /// `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub location: ::prost::alloc::string::String,
    /// Query string that is a conjunction of field-restricted queries and/or
    /// field-restricted filters.  Field-restricted queries and filters can be
    /// combined using `AND` to form a conjunction.
    ///
    /// A field query is in the form FIELD:QUERY. This implicitly checks if QUERY
    /// exists as a substring within Feature's FIELD. The QUERY
    /// and the FIELD are converted to a sequence of words (i.e. tokens) for
    /// comparison. This is done by:
    ///
    /// * Removing leading/trailing whitespace and tokenizing the search value.
    /// Characters that are not one of alphanumeric `\[a-zA-Z0-9\]`, underscore
    /// `_`, or asterisk `*` are treated as delimiters for tokens. `*` is treated
    /// as a wildcard that matches characters within a token.
    /// * Ignoring case.
    /// * Prepending an asterisk to the first and appending an asterisk to the
    /// last token in QUERY.
    ///
    /// A QUERY must be either a singular token or a phrase. A phrase is one or
    /// multiple words enclosed in double quotation marks ("). With phrases, the
    /// order of the words is important. Words in the phrase must be matching in
    /// order and consecutively.
    ///
    /// Supported FIELDs for field-restricted queries:
    ///
    /// * `feature_id`
    /// * `description`
    /// * `entity_type_id`
    ///
    /// Examples:
    ///
    /// * `feature_id: foo` --> Matches a Feature with ID containing the substring
    /// `foo` (eg. `foo`, `foofeature`, `barfoo`).
    /// * `feature_id: foo*feature` --> Matches a Feature with ID containing the
    /// substring `foo*feature` (eg. `foobarfeature`).
    /// * `feature_id: foo AND description: bar` --> Matches a Feature with ID
    /// containing the substring `foo` and description containing the substring
    /// `bar`.
    ///
    ///
    /// Besides field queries, the following exact-match filters are
    /// supported. The exact-match filters do not support wildcards. Unlike
    /// field-restricted queries, exact-match filters are case-sensitive.
    ///
    /// * `feature_id`: Supports = comparisons.
    /// * `description`: Supports = comparisons. Multi-token filters should be
    /// enclosed in quotes.
    /// * `entity_type_id`: Supports = comparisons.
    /// * `value_type`: Supports = and != comparisons.
    /// * `labels`: Supports key-value equality as well as key presence.
    /// * `featurestore_id`: Supports = comparisons.
    ///
    /// Examples:
    /// * `description = "foo bar"` --> Any Feature with description exactly equal
    /// to `foo bar`
    /// * `value_type = DOUBLE` --> Features whose type is DOUBLE.
    /// * `labels.active = yes AND labels.env = prod` --> Features having both
    ///     (active: yes) and (env: prod) labels.
    /// * `labels.env: *` --> Any Feature which has a label with `env` as the
    ///   key.
    // Tag 2 is not present in this generated message; the next field uses tag 3.
    #[prost(string, tag = "3")]
    pub query: ::prost::alloc::string::String,
    /// The maximum number of Features to return. The service may return fewer
    /// than this value. If unspecified, at most 100 Features will be returned.
    /// The maximum value is 100; any value greater than 100 will be coerced to
    /// 100.
    #[prost(int32, tag = "4")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1.FeaturestoreService.SearchFeatures\] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1.FeaturestoreService.SearchFeatures\], except `page_size`, must
    /// match the call that provided the page token.
    #[prost(string, tag = "5")]
    pub page_token: ::prost::alloc::string::String,
}
/// Response message for \[FeaturestoreService.SearchFeatures][google.cloud.aiplatform.v1.FeaturestoreService.SearchFeatures\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchFeaturesResponse {
    /// The Features matching the request.
    ///
    /// Fields returned:
    ///
    ///  * `name`
    ///  * `description`
    ///  * `labels`
    ///  * `create_time`
    ///  * `update_time`
    #[prost(message, repeated, tag = "1")]
    pub features: ::prost::alloc::vec::Vec<Feature>,
    /// A token, which can be sent as \[SearchFeaturesRequest.page_token][google.cloud.aiplatform.v1.SearchFeaturesRequest.page_token\] to
    /// retrieve the next page.
    /// If this field is omitted, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[FeaturestoreService.UpdateFeature][google.cloud.aiplatform.v1.FeaturestoreService.UpdateFeature\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateFeatureRequest {
    /// Required. The Feature's `name` field is used to identify the Feature to be
    /// updated.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
    #[prost(message, optional, tag = "1")]
    pub feature: ::core::option::Option<Feature>,
    /// Field mask is used to specify the fields to be overwritten in the
    /// Features resource by the update.
    /// The fields specified in the update_mask are relative to the resource, not
    /// the full request. A field will be overwritten if it is in the mask. If the
    /// user does not provide a mask then only the non-empty fields present in the
    /// request will be overwritten. Set the update_mask to `*` to override all
    /// fields.
    ///
    /// Updatable fields:
    ///
    /// * `description`
    /// * `labels`
    /// * `monitoring_config.snapshot_analysis.disabled`
    /// * `monitoring_config.snapshot_analysis.monitoring_interval`
    // `google.protobuf.FieldMask` well-known type, mapped via prost-types.
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for \[FeaturestoreService.DeleteFeature][google.cloud.aiplatform.v1.FeaturestoreService.DeleteFeature\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteFeatureRequest {
    /// Required. The name of the Feature to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Details of operations that perform create Featurestore.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateFeaturestoreOperationMetadata {
    /// Operation metadata for Featurestore.
    // Only the shared generic metadata; no operation-specific fields.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Details of operations that perform update Featurestore.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateFeaturestoreOperationMetadata {
    /// Operation metadata for Featurestore.
    // Only the shared generic metadata; no operation-specific fields.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Details of operations that perform import feature values.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ImportFeatureValuesOperationMetadata {
    /// Operation metadata for Featurestore import feature values.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
    /// Number of entities that have been imported by the operation.
    #[prost(int64, tag = "2")]
    pub imported_entity_count: i64,
    /// Number of feature values that have been imported by the operation.
    #[prost(int64, tag = "3")]
    pub imported_feature_value_count: i64,
    /// The number of rows in input source that weren't imported due to either
    /// * Not having any featureValues.
    /// * Having a null entityId.
    /// * Having a null timestamp.
    /// * Not being parsable (applicable for CSV sources).
    // Tags 4-5 are not present in this generated message; this field uses tag 6.
    #[prost(int64, tag = "6")]
    pub invalid_row_count: i64,
}
/// Details of operations that exports Features values.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportFeatureValuesOperationMetadata {
    /// Operation metadata for Featurestore export Feature values.
    // Only the shared generic metadata; no operation-specific fields.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Details of operations that batch reads Feature values.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchReadFeatureValuesOperationMetadata {
    /// Operation metadata for Featurestore batch read Features values.
    // Only the shared generic metadata; no operation-specific fields.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Details of operations that perform create EntityType.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateEntityTypeOperationMetadata {
    /// Operation metadata for EntityType.
    // Only the shared generic metadata; no operation-specific fields.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Details of operations that perform create Feature.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateFeatureOperationMetadata {
    /// Operation metadata for Feature.
    // Only the shared generic metadata; no operation-specific fields.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Details of operations that perform batch create Features.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchCreateFeaturesOperationMetadata {
    /// Operation metadata for Feature.
    // Only the shared generic metadata; no operation-specific fields.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
#[doc = r" Generated client implementations."]
pub mod featurestore_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    // Machine-generated tonic client. Every RPC method below follows the same
    // pattern: wait for the underlying transport to become ready, build a
    // ProstCodec, and issue a unary call on the service's static gRPC path.
    #[doc = " The service that handles CRUD and List for resources for Featurestore."]
    #[derive(Debug, Clone)]
    pub struct FeaturestoreServiceClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl<T> FeaturestoreServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps an already-built transport (e.g. a channel) in a gRPC client.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Builds a client whose every request passes through `interceptor` first.
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> FeaturestoreServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            FeaturestoreServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Creates a new Featurestore in a given project and location."]
        pub async fn create_featurestore(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateFeaturestoreRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/CreateFeaturestore",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets details of a single Featurestore."]
        pub async fn get_featurestore(
            &mut self,
            request: impl tonic::IntoRequest<super::GetFeaturestoreRequest>,
        ) -> Result<tonic::Response<super::Featurestore>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/GetFeaturestore",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists Featurestores in a given project and location."]
        pub async fn list_featurestores(
            &mut self,
            request: impl tonic::IntoRequest<super::ListFeaturestoresRequest>,
        ) -> Result<tonic::Response<super::ListFeaturestoresResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/ListFeaturestores",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates the parameters of a single Featurestore."]
        pub async fn update_featurestore(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateFeaturestoreRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/UpdateFeaturestore",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a single Featurestore. The Featurestore must not contain any"]
        #[doc = " EntityTypes or `force` must be set to true for the request to succeed."]
        pub async fn delete_featurestore(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteFeaturestoreRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/DeleteFeaturestore",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a new EntityType in a given Featurestore."]
        pub async fn create_entity_type(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateEntityTypeRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/CreateEntityType",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets details of a single EntityType."]
        pub async fn get_entity_type(
            &mut self,
            request: impl tonic::IntoRequest<super::GetEntityTypeRequest>,
        ) -> Result<tonic::Response<super::EntityType>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/GetEntityType",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists EntityTypes in a given Featurestore."]
        pub async fn list_entity_types(
            &mut self,
            request: impl tonic::IntoRequest<super::ListEntityTypesRequest>,
        ) -> Result<tonic::Response<super::ListEntityTypesResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/ListEntityTypes",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates the parameters of a single EntityType."]
        pub async fn update_entity_type(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateEntityTypeRequest>,
        ) -> Result<tonic::Response<super::EntityType>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/UpdateEntityType",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a single EntityType. The EntityType must not have any Features"]
        #[doc = " or `force` must be set to true for the request to succeed."]
        pub async fn delete_entity_type(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteEntityTypeRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/DeleteEntityType",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a new Feature in a given EntityType."]
        pub async fn create_feature(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateFeatureRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/CreateFeature",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a batch of Features in a given EntityType."]
        pub async fn batch_create_features(
            &mut self,
            request: impl tonic::IntoRequest<super::BatchCreateFeaturesRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/BatchCreateFeatures",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets details of a single Feature."]
        pub async fn get_feature(
            &mut self,
            request: impl tonic::IntoRequest<super::GetFeatureRequest>,
        ) -> Result<tonic::Response<super::Feature>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/GetFeature",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists Features in a given EntityType."]
        pub async fn list_features(
            &mut self,
            request: impl tonic::IntoRequest<super::ListFeaturesRequest>,
        ) -> Result<tonic::Response<super::ListFeaturesResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/ListFeatures",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates the parameters of a single Feature."]
        pub async fn update_feature(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateFeatureRequest>,
        ) -> Result<tonic::Response<super::Feature>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/UpdateFeature",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a single Feature."]
        pub async fn delete_feature(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteFeatureRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/DeleteFeature",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Imports Feature values into the Featurestore from a source storage."]
        #[doc = ""]
        #[doc = " The progress of the import is tracked by the returned operation. The"]
        #[doc = " imported features are guaranteed to be visible to subsequent read"]
        #[doc = " operations after the operation is marked as successfully done."]
        #[doc = ""]
        #[doc = " If an import operation fails, the Feature values returned from"]
        #[doc = " reads and exports may be inconsistent. If consistency is"]
        #[doc = " required, the caller must retry the same import request again and wait till"]
        #[doc = " the new operation returned is marked as successfully done."]
        #[doc = ""]
        #[doc = " There are also scenarios where the caller can cause inconsistency."]
        #[doc = ""]
        #[doc = " - Source data for import contains multiple distinct Feature values for"]
        #[doc = "   the same entity ID and timestamp."]
        #[doc = " - Source is modified during an import. This includes adding, updating, or"]
        #[doc = "   removing source data and/or metadata. Examples of updating metadata"]
        #[doc = "   include but are not limited to changing storage location, storage class,"]
        #[doc = "   or retention policy."]
        #[doc = " - Online serving cluster is under-provisioned."]
        pub async fn import_feature_values(
            &mut self,
            request: impl tonic::IntoRequest<super::ImportFeatureValuesRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/ImportFeatureValues",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Batch reads Feature values from a Featurestore."]
        #[doc = ""]
        #[doc = " This API enables batch reading Feature values, where each read"]
        #[doc = " instance in the batch may read Feature values of entities from one or"]
        #[doc = " more EntityTypes. Point-in-time correctness is guaranteed for Feature"]
        #[doc = " values of each read instance as of each instance's read timestamp."]
        pub async fn batch_read_feature_values(
            &mut self,
            request: impl tonic::IntoRequest<super::BatchReadFeatureValuesRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/BatchReadFeatureValues",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Exports Feature values from all the entities of a target EntityType."]
        pub async fn export_feature_values(
            &mut self,
            request: impl tonic::IntoRequest<super::ExportFeatureValuesRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/ExportFeatureValues",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Searches Features matching a query in a given project."]
        pub async fn search_features(
            &mut self,
            request: impl tonic::IntoRequest<super::SearchFeaturesRequest>,
        ) -> Result<tonic::Response<super::SearchFeaturesResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.FeaturestoreService/SearchFeatures",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// A message representing a Study.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Study {
    /// Output only. The name of a study. The study's globally unique identifier.
    /// Format: `projects/{project}/locations/{location}/studies/{study}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. Describes the Study, default value is empty string.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Required. Configuration of the Study.
    #[prost(message, optional, tag = "3")]
    pub study_spec: ::core::option::Option<StudySpec>,
    /// Output only. The detailed state of a Study.
    // Stored as a raw i32; interpret via the `study::State` enumeration.
    #[prost(enumeration = "study::State", tag = "4")]
    pub state: i32,
    /// Output only. Time at which the study was created.
    #[prost(message, optional, tag = "5")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. A human readable reason why the Study is inactive.
    /// This should be empty if a study is ACTIVE or COMPLETED.
    #[prost(string, tag = "6")]
    pub inactive_reason: ::prost::alloc::string::String,
}
/// Nested message and enum types in `Study`.
pub mod study {
    /// Describes the Study state.
    // Discriminants are the proto enum numbers; `Study::state` carries one of
    // these as an i32.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum State {
        /// The study state is unspecified.
        Unspecified = 0,
        /// The study is active.
        Active = 1,
        /// The study is stopped due to an internal error.
        Inactive = 2,
        /// The study is done when the service exhausts the parameter search space
        /// or max_trial_count is reached.
        Completed = 3,
    }
}
/// A message representing a Trial. A Trial contains a unique set of Parameters
/// that has been or will be evaluated, along with the objective metrics got by
/// running the Trial.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Trial {
    /// Output only. Resource name of the Trial assigned by the service.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Output only. The identifier of the Trial assigned by the service.
    #[prost(string, tag = "2")]
    pub id: ::prost::alloc::string::String,
    /// Output only. The detailed state of the Trial.
    // Stored as the raw i32 of `trial::State` (prost keeps enum fields as i32).
    #[prost(enumeration = "trial::State", tag = "3")]
    pub state: i32,
    /// Output only. The parameters of the Trial.
    #[prost(message, repeated, tag = "4")]
    pub parameters: ::prost::alloc::vec::Vec<trial::Parameter>,
    /// Output only. The final measurement containing the objective value.
    #[prost(message, optional, tag = "5")]
    pub final_measurement: ::core::option::Option<Measurement>,
    /// Output only. A list of measurements that are strictly lexicographically
    /// ordered by their induced tuples (steps, elapsed_duration).
    /// These are used for early stopping computations.
    #[prost(message, repeated, tag = "6")]
    pub measurements: ::prost::alloc::vec::Vec<Measurement>,
    /// Output only. Time when the Trial was started.
    #[prost(message, optional, tag = "7")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the Trial's status changed to `SUCCEEDED` or `INFEASIBLE`.
    #[prost(message, optional, tag = "8")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. The identifier of the client that originally requested this Trial.
    /// Each client is identified by a unique client_id. When a client
    /// asks for a suggestion, Vizier will assign it a Trial. The client should
    /// evaluate the Trial, complete it, and report back to Vizier.
    /// If suggestion is asked again by same client_id before the Trial is
    /// completed, the same Trial will be returned. Multiple clients with
    /// different client_ids can ask for suggestions simultaneously, each of them
    /// will get their own Trial.
    #[prost(string, tag = "9")]
    pub client_id: ::prost::alloc::string::String,
    /// Output only. A human readable string describing why the Trial is
    /// infeasible. This is set only if Trial state is `INFEASIBLE`.
    #[prost(string, tag = "10")]
    pub infeasible_reason: ::prost::alloc::string::String,
    /// Output only. The CustomJob name linked to the Trial.
    /// It's set for a HyperparameterTuningJob's Trial.
    #[prost(string, tag = "11")]
    pub custom_job: ::prost::alloc::string::String,
    /// Output only. URIs for accessing [interactive
    /// shells](<https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell>)
    /// (one URI for each training node). Only available if this trial is part of
    /// a \[HyperparameterTuningJob][google.cloud.aiplatform.v1.HyperparameterTuningJob\] and the job's
    /// \[trial_job_spec.enable_web_access][google.cloud.aiplatform.v1.CustomJobSpec.enable_web_access\] field
    /// is `true`.
    ///
    /// The keys are names of each node used for the trial; for example,
    /// `workerpool0-0` for the primary node, `workerpool1-0` for the first node in
    /// the second worker pool, and `workerpool1-1` for the second node in the
    /// second worker pool.
    ///
    /// The values are the URIs for each node's interactive shell.
    // proto `map<string, string>` fields are decoded into a std HashMap.
    #[prost(map = "string, string", tag = "12")]
    pub web_access_uris:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
}
/// Nested message and enum types in `Trial`.
pub mod trial {
    /// A message representing a parameter to be tuned.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Parameter {
        /// Output only. The ID of the parameter. The parameter should be defined in
        /// [StudySpec's Parameters]\[google.cloud.aiplatform.v1.StudySpec.parameters\].
        #[prost(string, tag = "1")]
        pub parameter_id: ::prost::alloc::string::String,
        /// Output only. The value of the parameter.
        /// `number_value` will be set if a parameter defined in StudySpec is
        /// in type 'INTEGER', 'DOUBLE' or 'DISCRETE'.
        /// `string_value` will be set if a parameter defined in StudySpec is
        /// in type 'CATEGORICAL'.
        // Dynamically-typed `google.protobuf.Value`, hence `prost_types::Value`.
        #[prost(message, optional, tag = "2")]
        pub value: ::core::option::Option<::prost_types::Value>,
    }
    /// Describes a Trial state.
    // Discriminants are the proto enum numbers; `Trial::state` carries one of
    // these as an i32.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum State {
        /// The Trial state is unspecified.
        Unspecified = 0,
        /// Indicates that a specific Trial has been requested, but it has not yet
        /// been suggested by the service.
        Requested = 1,
        /// Indicates that the Trial has been suggested.
        Active = 2,
        /// Indicates that the Trial should stop according to the service.
        Stopping = 3,
        /// Indicates that the Trial is completed successfully.
        Succeeded = 4,
        /// Indicates that the Trial should not be attempted again.
        /// The service will set a Trial to INFEASIBLE when it's done but missing
        /// the final_measurement.
        Infeasible = 5,
    }
}
/// Represents specification of a Study.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct StudySpec {
    /// Required. Metric specs for the Study.
    #[prost(message, repeated, tag = "1")]
    pub metrics: ::prost::alloc::vec::Vec<study_spec::MetricSpec>,
    /// Required. The set of parameters to tune.
    #[prost(message, repeated, tag = "2")]
    pub parameters: ::prost::alloc::vec::Vec<study_spec::ParameterSpec>,
    /// The search algorithm specified for the Study.
    #[prost(enumeration = "study_spec::Algorithm", tag = "3")]
    pub algorithm: i32,
    /// The observation noise level of the study.
    /// Currently only supported by the Vizier service. Not supported by
    /// HyperparamterTuningJob or TrainingPipeline.
    #[prost(enumeration = "study_spec::ObservationNoise", tag = "6")]
    pub observation_noise: i32,
    /// Describe which measurement selection type will be used
    #[prost(enumeration = "study_spec::MeasurementSelectionType", tag = "7")]
    pub measurement_selection_type: i32,
    // proto `oneof`: at most one of the variants (wire tags 4 and 5) is set,
    // hence the single Option-wrapped enum field.
    #[prost(oneof = "study_spec::AutomatedStoppingSpec", tags = "4, 5")]
    pub automated_stopping_spec: ::core::option::Option<study_spec::AutomatedStoppingSpec>,
}
/// Nested message and enum types in `StudySpec`.
// NOTE(review): prost-generated companion module for `StudySpec`; prefer
// regenerating from the .proto over hand-editing.
pub mod study_spec {
    /// Represents a metric to optimize.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct MetricSpec {
        /// Required. The ID of the metric. Must not contain whitespaces and must be unique
        /// amongst all MetricSpecs.
        #[prost(string, tag = "1")]
        pub metric_id: ::prost::alloc::string::String,
        /// Required. The optimization goal of the metric.
        #[prost(enumeration = "metric_spec::GoalType", tag = "2")]
        pub goal: i32,
    }
    /// Nested message and enum types in `MetricSpec`.
    pub mod metric_spec {
        /// The available types of optimization goals.
        #[derive(
            Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
        )]
        #[repr(i32)]
        pub enum GoalType {
            /// Goal Type will default to maximize.
            Unspecified = 0,
            /// Maximize the goal metric.
            Maximize = 1,
            /// Minimize the goal metric.
            Minimize = 2,
        }
    }
    /// Represents a single parameter to optimize.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct ParameterSpec {
        /// Required. The ID of the parameter. Must not contain whitespaces and must be unique
        /// amongst all ParameterSpecs.
        #[prost(string, tag = "1")]
        pub parameter_id: ::prost::alloc::string::String,
        /// How the parameter should be scaled.
        /// Leave unset for `CATEGORICAL` parameters.
        #[prost(enumeration = "parameter_spec::ScaleType", tag = "6")]
        pub scale_type: i32,
        /// A conditional parameter node is active if the parameter's value matches
        /// the conditional node's parent_value_condition.
        ///
        /// If two items in conditional_parameter_specs have the same name, they
        /// must have disjoint parent_value_condition.
        #[prost(message, repeated, tag = "10")]
        pub conditional_parameter_specs:
            ::prost::alloc::vec::Vec<parameter_spec::ConditionalParameterSpec>,
        // proto `oneof`: exactly one of the four value-spec variants is expected.
        #[prost(oneof = "parameter_spec::ParameterValueSpec", tags = "2, 3, 4, 5")]
        pub parameter_value_spec: ::core::option::Option<parameter_spec::ParameterValueSpec>,
    }
    /// Nested message and enum types in `ParameterSpec`.
    pub mod parameter_spec {
        /// Value specification for a parameter in `DOUBLE` type.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct DoubleValueSpec {
            /// Required. Inclusive minimum value of the parameter.
            #[prost(double, tag = "1")]
            pub min_value: f64,
            /// Required. Inclusive maximum value of the parameter.
            #[prost(double, tag = "2")]
            pub max_value: f64,
            /// A default value for a `DOUBLE` parameter that is assumed to be a
            /// relatively good starting point. Unset value signals that there is no
            /// offered starting point.
            ///
            /// Currently only supported by the Vizier service. Not supported by
            /// HyperparamterTuningJob or TrainingPipeline.
            #[prost(double, optional, tag = "4")]
            pub default_value: ::core::option::Option<f64>,
        }
        /// Value specification for a parameter in `INTEGER` type.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct IntegerValueSpec {
            /// Required. Inclusive minimum value of the parameter.
            #[prost(int64, tag = "1")]
            pub min_value: i64,
            /// Required. Inclusive maximum value of the parameter.
            #[prost(int64, tag = "2")]
            pub max_value: i64,
            /// A default value for an `INTEGER` parameter that is assumed to be a
            /// relatively good starting point. Unset value signals that there is no
            /// offered starting point.
            ///
            /// Currently only supported by the Vizier service. Not supported by
            /// HyperparamterTuningJob or TrainingPipeline.
            #[prost(int64, optional, tag = "4")]
            pub default_value: ::core::option::Option<i64>,
        }
        /// Value specification for a parameter in `CATEGORICAL` type.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct CategoricalValueSpec {
            /// Required. The list of possible categories.
            #[prost(string, repeated, tag = "1")]
            pub values: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
            /// A default value for a `CATEGORICAL` parameter that is assumed to be a
            /// relatively good starting point. Unset value signals that there is no
            /// offered starting point.
            ///
            /// Currently only supported by the Vizier service. Not supported by
            /// HyperparamterTuningJob or TrainingPipeline.
            #[prost(string, optional, tag = "3")]
            pub default_value: ::core::option::Option<::prost::alloc::string::String>,
        }
        /// Value specification for a parameter in `DISCRETE` type.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct DiscreteValueSpec {
            /// Required. A list of possible values.
            /// The list should be in increasing order and at least 1e-10 apart.
            /// For instance, this parameter might have possible settings of 1.5, 2.5,
            /// and 4.0. This list should not contain more than 1,000 values.
            #[prost(double, repeated, packed = "false", tag = "1")]
            pub values: ::prost::alloc::vec::Vec<f64>,
            /// A default value for a `DISCRETE` parameter that is assumed to be a
            /// relatively good starting point. Unset value signals that there is no
            /// offered starting point. It automatically rounds to the
            /// nearest feasible discrete point.
            ///
            /// Currently only supported by the Vizier service. Not supported by
            /// HyperparamterTuningJob or TrainingPipeline.
            #[prost(double, optional, tag = "3")]
            pub default_value: ::core::option::Option<f64>,
        }
        /// Represents a parameter spec with condition from its parent parameter.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct ConditionalParameterSpec {
            /// Required. The spec for a conditional parameter.
            // `super::ParameterSpec` — the recursion (spec containing specs) is
            // broken by the Option/Box-free prost encoding of optional messages.
            #[prost(message, optional, tag = "1")]
            pub parameter_spec: ::core::option::Option<super::ParameterSpec>,
            /// A set of parameter values from the parent ParameterSpec's feasible
            /// space.
            #[prost(oneof = "conditional_parameter_spec::ParentValueCondition", tags = "2, 3, 4")]
            pub parent_value_condition:
                ::core::option::Option<conditional_parameter_spec::ParentValueCondition>,
        }
        /// Nested message and enum types in `ConditionalParameterSpec`.
        pub mod conditional_parameter_spec {
            /// Represents the spec to match discrete values from parent parameter.
            #[derive(Clone, PartialEq, ::prost::Message)]
            pub struct DiscreteValueCondition {
                /// Required. Matches values of the parent parameter of 'DISCRETE' type.
                /// All values must exist in `discrete_value_spec` of parent parameter.
                ///
                /// The Epsilon of the value matching is 1e-10.
                #[prost(double, repeated, packed = "false", tag = "1")]
                pub values: ::prost::alloc::vec::Vec<f64>,
            }
            /// Represents the spec to match integer values from parent parameter.
            #[derive(Clone, PartialEq, ::prost::Message)]
            pub struct IntValueCondition {
                /// Required. Matches values of the parent parameter of 'INTEGER' type.
                /// All values must lie in `integer_value_spec` of parent parameter.
                #[prost(int64, repeated, packed = "false", tag = "1")]
                pub values: ::prost::alloc::vec::Vec<i64>,
            }
            /// Represents the spec to match categorical values from parent parameter.
            #[derive(Clone, PartialEq, ::prost::Message)]
            pub struct CategoricalValueCondition {
                /// Required. Matches values of the parent parameter of 'CATEGORICAL' type.
                /// All values must exist in `categorical_value_spec` of parent
                /// parameter.
                #[prost(string, repeated, tag = "1")]
                pub values: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
            }
            /// A set of parameter values from the parent ParameterSpec's feasible
            /// space.
            // Rust enum generated for the proto `oneof`; the variant tags match
            // the `tags = "2, 3, 4"` listed on the owning field.
            #[derive(Clone, PartialEq, ::prost::Oneof)]
            pub enum ParentValueCondition {
                /// The spec for matching values from a parent parameter of
                /// `DISCRETE` type.
                #[prost(message, tag = "2")]
                ParentDiscreteValues(DiscreteValueCondition),
                /// The spec for matching values from a parent parameter of `INTEGER`
                /// type.
                #[prost(message, tag = "3")]
                ParentIntValues(IntValueCondition),
                /// The spec for matching values from a parent parameter of
                /// `CATEGORICAL` type.
                #[prost(message, tag = "4")]
                ParentCategoricalValues(CategoricalValueCondition),
            }
        }
        /// The type of scaling that should be applied to this parameter.
        #[derive(
            Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
        )]
        #[repr(i32)]
        pub enum ScaleType {
            /// By default, no scaling is applied.
            Unspecified = 0,
            /// Scales the feasible space to (0, 1) linearly.
            UnitLinearScale = 1,
            /// Scales the feasible space logarithmically to (0, 1). The entire
            /// feasible space must be strictly positive.
            UnitLogScale = 2,
            /// Scales the feasible space "reverse" logarithmically to (0, 1). The
            /// result is that values close to the top of the feasible space are spread
            /// out more than points near the bottom. The entire feasible space must be
            /// strictly positive.
            UnitReverseLogScale = 3,
        }
        // Rust enum generated for `ParameterSpec`'s value-spec proto `oneof`.
        #[derive(Clone, PartialEq, ::prost::Oneof)]
        pub enum ParameterValueSpec {
            /// The value spec for a 'DOUBLE' parameter.
            #[prost(message, tag = "2")]
            DoubleValueSpec(DoubleValueSpec),
            /// The value spec for an 'INTEGER' parameter.
            #[prost(message, tag = "3")]
            IntegerValueSpec(IntegerValueSpec),
            /// The value spec for a 'CATEGORICAL' parameter.
            #[prost(message, tag = "4")]
            CategoricalValueSpec(CategoricalValueSpec),
            /// The value spec for a 'DISCRETE' parameter.
            #[prost(message, tag = "5")]
            DiscreteValueSpec(DiscreteValueSpec),
        }
    }
    /// The decay curve automated stopping rule builds a Gaussian Process
    /// Regressor to predict the final objective value of a Trial based on the
    /// already completed Trials and the intermediate measurements of the current
    /// Trial. Early stopping is requested for the current Trial if there is very
    /// low probability to exceed the optimal value found so far.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct DecayCurveAutomatedStoppingSpec {
        /// True if \[Measurement.elapsed_duration][google.cloud.aiplatform.v1.Measurement.elapsed_duration\] is used as the x-axis of each
        /// Trials Decay Curve. Otherwise, \[Measurement.step_count][google.cloud.aiplatform.v1.Measurement.step_count\] will be used
        /// as the x-axis.
        #[prost(bool, tag = "1")]
        pub use_elapsed_duration: bool,
    }
    /// The median automated stopping rule stops a pending Trial if the Trial's
    /// best objective_value is strictly below the median 'performance' of all
    /// completed Trials reported up to the Trial's last measurement.
    /// Currently, 'performance' refers to the running average of the objective
    /// values reported by the Trial in each measurement.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct MedianAutomatedStoppingSpec {
        /// True if median automated stopping rule applies on
        /// \[Measurement.elapsed_duration][google.cloud.aiplatform.v1.Measurement.elapsed_duration\]. It means that elapsed_duration
        /// field of latest measurement of current Trial is used to compute median
        /// objective value for each completed Trials.
        #[prost(bool, tag = "1")]
        pub use_elapsed_duration: bool,
    }
    /// The available search algorithms for the Study.
    // Note: discriminant 1 is intentionally absent in the visible variants.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum Algorithm {
        /// The default algorithm used by Vertex AI for [hyperparameter
        /// tuning](<https://cloud.google.com/vertex-ai/docs/training/hyperparameter-tuning-overview>)
        /// and [Vertex Vizier](<https://cloud.google.com/vertex-ai/docs/vizier>).
        Unspecified = 0,
        /// Simple grid search within the feasible space. To use grid search,
        /// all parameters must be `INTEGER`, `CATEGORICAL`, or `DISCRETE`.
        GridSearch = 2,
        /// Simple random search within the feasible space.
        RandomSearch = 3,
    }
    /// Describes the noise level of the repeated observations.
    ///
    /// "Noisy" means that the repeated observations with the same Trial parameters
    /// may lead to different metric evaluations.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum ObservationNoise {
        /// The default noise level chosen by Vertex AI.
        Unspecified = 0,
        /// Vertex AI assumes that the objective function is (nearly)
        /// perfectly reproducible, and will never repeat the same Trial
        /// parameters.
        Low = 1,
        /// Vertex AI will estimate the amount of noise in metric
        /// evaluations, it may repeat the same Trial parameters more than once.
        High = 2,
    }
    /// This indicates which measurement to use if/when the service automatically
    /// selects the final measurement from previously reported intermediate
    /// measurements. Choose this based on two considerations:
    /// A) Do you expect your measurements to monotonically improve?
    ///    If so, choose LAST_MEASUREMENT. On the other hand, if you're in a
    ///    situation where your system can "over-train" and you expect the
    ///    performance to get better for a while but then start declining,
    ///    choose BEST_MEASUREMENT.
    /// B) Are your measurements significantly noisy and/or irreproducible?
    ///    If so, BEST_MEASUREMENT will tend to be over-optimistic, and it
    ///    may be better to choose LAST_MEASUREMENT.
    /// If both or neither of (A) and (B) apply, it doesn't matter which
    /// selection type is chosen.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum MeasurementSelectionType {
        /// Will be treated as LAST_MEASUREMENT.
        Unspecified = 0,
        /// Use the last measurement reported.
        LastMeasurement = 1,
        /// Use the best measurement reported.
        BestMeasurement = 2,
    }
    // Rust enum generated for `StudySpec`'s automated-stopping proto `oneof`.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum AutomatedStoppingSpec {
        /// The automated early stopping spec using decay curve rule.
        #[prost(message, tag = "4")]
        DecayCurveStoppingSpec(DecayCurveAutomatedStoppingSpec),
        /// The automated early stopping spec using median rule.
        #[prost(message, tag = "5")]
        MedianAutomatedStoppingSpec(MedianAutomatedStoppingSpec),
    }
}
/// A message representing a Measurement of a Trial. A Measurement contains
/// the Metrics got by executing a Trial using suggested hyperparameter
/// values.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Measurement {
    /// Output only. Time that the Trial has been running at the point of this Measurement.
    // proto `google.protobuf.Duration`, hence `prost_types::Duration`.
    #[prost(message, optional, tag = "1")]
    pub elapsed_duration: ::core::option::Option<::prost_types::Duration>,
    /// Output only. The number of steps the machine learning model has been trained for.
    /// Must be non-negative.
    #[prost(int64, tag = "2")]
    pub step_count: i64,
    /// Output only. A list of metrics got by evaluating the objective functions using suggested
    /// Parameter values.
    #[prost(message, repeated, tag = "3")]
    pub metrics: ::prost::alloc::vec::Vec<measurement::Metric>,
}
/// Nested message and enum types in `Measurement`.
pub mod measurement {
    /// A message representing a metric in the measurement.
    // One (metric_id, value) pair; `Measurement::metrics` holds a list of these.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Metric {
        /// Output only. The ID of the Metric. The Metric should be defined in
        /// [StudySpec's Metrics]\[google.cloud.aiplatform.v1.StudySpec.metrics\].
        #[prost(string, tag = "1")]
        pub metric_id: ::prost::alloc::string::String,
        /// Output only. The value for this metric.
        #[prost(double, tag = "2")]
        pub value: f64,
    }
}
/// Represents a HyperparameterTuningJob. A HyperparameterTuningJob
/// has a Study specification and multiple CustomJobs with identical
/// CustomJob specification.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct HyperparameterTuningJob {
    /// Output only. Resource name of the HyperparameterTuningJob.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The display name of the HyperparameterTuningJob.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Required. Study configuration of the HyperparameterTuningJob.
    #[prost(message, optional, tag = "4")]
    pub study_spec: ::core::option::Option<StudySpec>,
    /// Required. The desired total number of Trials.
    #[prost(int32, tag = "5")]
    pub max_trial_count: i32,
    /// Required. The desired number of Trials to run in parallel.
    #[prost(int32, tag = "6")]
    pub parallel_trial_count: i32,
    /// The number of failed Trials that need to be seen before failing
    /// the HyperparameterTuningJob.
    ///
    /// If set to 0, Vertex AI decides how many Trials must fail
    /// before the whole job fails.
    #[prost(int32, tag = "7")]
    pub max_failed_trial_count: i32,
    /// Required. The spec of a trial job. The same spec applies to the CustomJobs created
    /// in all the trials.
    #[prost(message, optional, tag = "8")]
    pub trial_job_spec: ::core::option::Option<CustomJobSpec>,
    /// Output only. Trials of the HyperparameterTuningJob.
    #[prost(message, repeated, tag = "9")]
    pub trials: ::prost::alloc::vec::Vec<Trial>,
    /// Output only. The detailed state of the job.
    // Stored as the raw i32 of the shared `JobState` enum.
    #[prost(enumeration = "JobState", tag = "10")]
    pub state: i32,
    /// Output only. Time when the HyperparameterTuningJob was created.
    #[prost(message, optional, tag = "11")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the HyperparameterTuningJob for the first time entered the
    /// `JOB_STATE_RUNNING` state.
    #[prost(message, optional, tag = "12")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the HyperparameterTuningJob entered any of the following states:
    /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`.
    #[prost(message, optional, tag = "13")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Time when the HyperparameterTuningJob was most recently updated.
    #[prost(message, optional, tag = "14")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Only populated when job's state is JOB_STATE_FAILED or
    /// JOB_STATE_CANCELLED.
    // `google.rpc.Status` error detail.
    #[prost(message, optional, tag = "15")]
    pub error: ::core::option::Option<super::super::super::rpc::Status>,
    /// The labels with user-defined metadata to organize HyperparameterTuningJobs.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    #[prost(map = "string, string", tag = "16")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Customer-managed encryption key options for a HyperparameterTuningJob.
    /// If this is set, then all resources created by the HyperparameterTuningJob
    /// will be encrypted with the provided encryption key.
    #[prost(message, optional, tag = "17")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
}
/// A representation of a collection of database items organized in a way that
/// allows for approximate nearest neighbor (a.k.a ANN) algorithms search.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Index {
    /// Output only. The resource name of the Index.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The display name of the Index.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// The description of the Index.
    #[prost(string, tag = "3")]
    pub description: ::prost::alloc::string::String,
    /// Immutable. Points to a YAML file stored on Google Cloud Storage describing additional
    /// information about the Index, that is specific to it. Unset if the Index
    /// does not have any additional information.
    /// The schema is defined as an OpenAPI 3.0.2 [Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    /// Note: The URI given on output will be immutable and probably different,
    /// including the URI scheme, than the one given on input. The output URI will
    /// point to a location where the user only has a read access.
    #[prost(string, tag = "4")]
    pub metadata_schema_uri: ::prost::alloc::string::String,
    /// An additional information about the Index; the schema of the metadata can
    /// be found in \[metadata_schema][google.cloud.aiplatform.v1.Index.metadata_schema_uri\].
    // Dynamically-typed `google.protobuf.Value`, hence `prost_types::Value`.
    #[prost(message, optional, tag = "6")]
    pub metadata: ::core::option::Option<::prost_types::Value>,
    /// Output only. The pointers to DeployedIndexes created from this Index.
    /// An Index can be only deleted if all its DeployedIndexes had been undeployed
    /// first.
    #[prost(message, repeated, tag = "7")]
    pub deployed_indexes: ::prost::alloc::vec::Vec<DeployedIndexRef>,
    /// Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "8")]
    pub etag: ::prost::alloc::string::String,
    /// The labels with user-defined metadata to organize your Indexes.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    #[prost(map = "string, string", tag = "9")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Output only. Timestamp when this Index was created.
    #[prost(message, optional, tag = "10")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Index was most recently updated.
    /// This also includes any update to the contents of the Index.
    /// Note that Operations working on this Index may have their
    /// \[Operations.metadata.generic_metadata.update_time\]
    /// \[google.cloud.aiplatform.v1.GenericOperationMetadata.update_time\] a little after the value of this
    /// timestamp, yet that does not mean their results are not already reflected
    /// in the Index. Result of any successfully completed Operation on the Index
    /// is reflected in it.
    #[prost(message, optional, tag = "11")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
}
/// Indexes are deployed into it. An IndexEndpoint can have multiple
/// DeployedIndexes.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct IndexEndpoint {
    /// Output only. The resource name of the IndexEndpoint.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The display name of the IndexEndpoint.
    /// The name can be up to 128 characters long and can consist of any UTF-8
    /// characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// The description of the IndexEndpoint.
    #[prost(string, tag = "3")]
    pub description: ::prost::alloc::string::String,
    /// Output only. The indexes deployed in this endpoint.
    #[prost(message, repeated, tag = "4")]
    pub deployed_indexes: ::prost::alloc::vec::Vec<DeployedIndex>,
    /// Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "5")]
    pub etag: ::prost::alloc::string::String,
    /// The labels with user-defined metadata to organize your IndexEndpoints.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    // proto `map<string, string>` fields are decoded into a std HashMap.
    #[prost(map = "string, string", tag = "6")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Output only. Timestamp when this IndexEndpoint was created.
    #[prost(message, optional, tag = "7")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this IndexEndpoint was last updated.
    /// This timestamp is not updated when the endpoint's DeployedIndexes are
    /// updated, e.g. due to updates of the original Indexes they are the
    /// deployments of.
    #[prost(message, optional, tag = "8")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Required. Immutable. The full name of the Google Compute Engine
    /// \[network\](<https://cloud.google.com/compute/docs/networks-and-firewalls#networks>)
    /// to which the IndexEndpoint should be peered.
    ///
    /// Private services access must already be configured for the network. If left
    /// unspecified, the Endpoint is not peered with any network.
    ///
    /// \[Format\](<https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert>):
    /// projects/{project}/global/networks/{network}.
    /// Where {project} is a project number, as in '12345', and {network} is
    /// network name.
    #[prost(string, tag = "9")]
    pub network: ::prost::alloc::string::String,
}
/// A deployment of an Index. IndexEndpoints contain one or more DeployedIndexes.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployedIndex {
    /// Required. The user specified ID of the DeployedIndex.
    /// The ID can be up to 128 characters long and must start with a letter and
    /// only contain letters, numbers, and underscores.
    /// The ID must be unique within the project it is created in.
    #[prost(string, tag = "1")]
    pub id: ::prost::alloc::string::String,
    /// Required. The name of the Index this is the deployment of.
    /// We may refer to this Index as the DeployedIndex's "original" Index.
    #[prost(string, tag = "2")]
    pub index: ::prost::alloc::string::String,
    /// The display name of the DeployedIndex. If not provided upon creation,
    /// the Index's display_name is used.
    #[prost(string, tag = "3")]
    pub display_name: ::prost::alloc::string::String,
    /// Output only. Timestamp when the DeployedIndex was created.
    #[prost(message, optional, tag = "4")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Provides paths for users to send requests directly to the deployed index
    /// services running on Cloud via private services access. This field is
    /// populated if \[network][google.cloud.aiplatform.v1.IndexEndpoint.network\] is configured.
    #[prost(message, optional, tag = "5")]
    pub private_endpoints: ::core::option::Option<IndexPrivateEndpoints>,
    /// Output only. The DeployedIndex may depend on various data on its original Index.
    /// Additionally when certain changes to the original Index are being done
    /// (e.g. when what the Index contains is being changed) the DeployedIndex may
    /// be asynchronously updated in the background to reflect these changes.
    /// If this timestamp's value is at least the \[Index.update_time][google.cloud.aiplatform.v1.Index.update_time\] of the
    /// original Index, it means that this DeployedIndex and the original Index are
    /// in sync. If this timestamp is older, then to see which updates this
    /// DeployedIndex already contains (and which not), one must
    /// \[list][Operations.ListOperations\] \[Operations][Operation\]
    /// \[working][Operation.name\] on the original Index. Only
    /// the successfully completed Operations with
    /// \[Operations.metadata.generic_metadata.update_time\]
    /// \[google.cloud.aiplatform.v1.GenericOperationMetadata.update_time\]
    /// equal or before this sync time are contained in this DeployedIndex.
    #[prost(message, optional, tag = "6")]
    pub index_sync_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Optional. A description of resources that the DeployedIndex uses, which to large
    /// degree are decided by Vertex AI, and optionally allows only a modest
    /// additional configuration.
    /// If min_replica_count is not set, the default value is 2 (we don't provide
    /// SLA when min_replica_count=1). If max_replica_count is not set, the
    /// default value is min_replica_count. The max allowed replica count is
    /// 1000.
    #[prost(message, optional, tag = "7")]
    pub automatic_resources: ::core::option::Option<AutomaticResources>,
    /// Optional. If true, private endpoint's access logs are sent to StackDriver Logging.
    ///
    /// These logs are like standard server access logs, containing
    /// information like timestamp and latency for each MatchRequest.
    ///
    /// Note that Stackdriver logs may incur a cost, especially if the deployed
    /// index receives a high queries per second rate (QPS).
    /// Estimate your costs before enabling this option.
    #[prost(bool, tag = "8")]
    pub enable_access_logging: bool,
    /// Optional. If set, the authentication is enabled for the private endpoint.
    #[prost(message, optional, tag = "9")]
    pub deployed_index_auth_config: ::core::option::Option<DeployedIndexAuthConfig>,
    /// Optional. A list of reserved ip ranges under the VPC network that can be
    /// used for this DeployedIndex.
    ///
    /// If set, we will deploy the index within the provided ip ranges. Otherwise,
    /// the index might be deployed to any ip ranges under the provided VPC
    /// network.
    ///
    /// The value should be the name of the address
    /// (<https://cloud.google.com/compute/docs/reference/rest/v1/addresses>)
    /// Example: 'vertex-ai-ip-range'.
    #[prost(string, repeated, tag = "10")]
    pub reserved_ip_ranges: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Optional. The deployment group can be no longer than 64 characters (eg:
    /// 'test', 'prod'). If not set, we will use the 'default' deployment group.
    ///
    /// Creating `deployment_groups` with `reserved_ip_ranges` is a recommended
    /// practice when the peered network has multiple peering ranges. This creates
    /// your deployments from predictable IP spaces for easier traffic
    /// administration. Also, one deployment_group (except 'default') can only be
    /// used with the same reserved_ip_ranges which means if the deployment_group
    /// has been used with reserved_ip_ranges: [a, b, c], using it with [a, b] or
    /// [d, e] is disallowed.
    ///
    /// Note: we only support up to 5 deployment groups(not including 'default').
    #[prost(string, tag = "11")]
    pub deployment_group: ::prost::alloc::string::String,
}
/// Used to set up the auth on the DeployedIndex's private endpoint.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployedIndexAuthConfig {
    /// Defines the authentication provider that the DeployedIndex uses.
    #[prost(message, optional, tag = "1")]
    pub auth_provider: ::core::option::Option<deployed_index_auth_config::AuthProvider>,
}
/// Nested message and enum types in `DeployedIndexAuthConfig`.
pub mod deployed_index_auth_config {
    /// Configuration for an authentication provider, including support for
    /// [JSON Web Token
    /// (JWT)](<https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32>).
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct AuthProvider {
        /// The list of JWT
        /// \[audiences\](<https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3>)
        /// that are allowed to access. A JWT containing any of these audiences will
        /// be accepted.
        #[prost(string, repeated, tag = "1")]
        pub audiences: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
        /// A list of allowed JWT issuers. Each entry must be a valid Google
        /// service account, in the following format:
        ///
        /// `[email protected]`
        #[prost(string, repeated, tag = "2")]
        pub allowed_issuers: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    }
}
/// IndexPrivateEndpoints proto is used to provide paths for users to send
/// requests via private services access.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct IndexPrivateEndpoints {
    /// Output only. The IP address used to send match gRPC requests.
    #[prost(string, tag = "1")]
    pub match_grpc_address: ::prost::alloc::string::String,
}
/// Request message for \[IndexEndpointService.CreateIndexEndpoint][google.cloud.aiplatform.v1.IndexEndpointService.CreateIndexEndpoint\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateIndexEndpointRequest {
    /// Required. The resource name of the Location to create the IndexEndpoint in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The IndexEndpoint to create.
    #[prost(message, optional, tag = "2")]
    pub index_endpoint: ::core::option::Option<IndexEndpoint>,
}
/// Runtime operation information for
/// \[IndexEndpointService.CreateIndexEndpoint][google.cloud.aiplatform.v1.IndexEndpointService.CreateIndexEndpoint\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateIndexEndpointOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[IndexEndpointService.GetIndexEndpoint][google.cloud.aiplatform.v1.IndexEndpointService.GetIndexEndpoint\]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetIndexEndpointRequest {
    /// Required. The name of the IndexEndpoint resource.
    /// Format:
    /// `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[IndexEndpointService.ListIndexEndpoints][google.cloud.aiplatform.v1.IndexEndpointService.ListIndexEndpoints\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListIndexEndpointsRequest {
    /// Required. The resource name of the Location from which to list the IndexEndpoints.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Optional. An expression for filtering the results of the request. For field names
    /// both snake_case and camelCase are supported.
    ///
    ///    * `index_endpoint` supports = and !=. `index_endpoint` represents the
    ///       IndexEndpoint ID, i.e. the last segment of the IndexEndpoint's
    ///       \[resourcename][google.cloud.aiplatform.v1.IndexEndpoint.name\].
    ///    * `display_name` supports =, != and regex()
    ///             (uses \[re2\](<https://github.com/google/re2/wiki/Syntax>) syntax)
    ///    * `labels` supports general map functions that is:
    ///             `labels.key=value` - key:value equality
    ///             `labels.key:*` or `labels:key` - key existence
    ///              A key including a space must be quoted. `labels."a key"`.
    ///
    /// Some examples:
    ///    * `index_endpoint="1"`
    ///    * `display_name="myDisplayName"`
    ///    * `regex(display_name, "^A")` -> The display name starts with an A.
    ///    * `labels.myKey="myValue"`
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// Optional. The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// Optional. The standard list page token.
    /// Typically obtained via
    /// \[ListIndexEndpointsResponse.next_page_token][google.cloud.aiplatform.v1.ListIndexEndpointsResponse.next_page_token\] of the previous
    /// \[IndexEndpointService.ListIndexEndpoints][google.cloud.aiplatform.v1.IndexEndpointService.ListIndexEndpoints\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Optional. Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[IndexEndpointService.ListIndexEndpoints][google.cloud.aiplatform.v1.IndexEndpointService.ListIndexEndpoints\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListIndexEndpointsResponse {
    /// List of IndexEndpoints in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub index_endpoints: ::prost::alloc::vec::Vec<IndexEndpoint>,
    /// A token to retrieve next page of results.
    /// Pass to \[ListIndexEndpointsRequest.page_token][google.cloud.aiplatform.v1.ListIndexEndpointsRequest.page_token\] to obtain that page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[IndexEndpointService.UpdateIndexEndpoint][google.cloud.aiplatform.v1.IndexEndpointService.UpdateIndexEndpoint\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateIndexEndpointRequest {
    /// Required. The IndexEndpoint which replaces the resource on the server.
    #[prost(message, optional, tag = "1")]
    pub index_endpoint: ::core::option::Option<IndexEndpoint>,
    /// Required. The update mask applies to the resource. See \[google.protobuf.FieldMask][google.protobuf.FieldMask\].
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for \[IndexEndpointService.DeleteIndexEndpoint][google.cloud.aiplatform.v1.IndexEndpointService.DeleteIndexEndpoint\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteIndexEndpointRequest {
    /// Required. The name of the IndexEndpoint resource to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[IndexEndpointService.DeployIndex][google.cloud.aiplatform.v1.IndexEndpointService.DeployIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployIndexRequest {
    /// Required. The name of the IndexEndpoint resource into which to deploy an Index.
    /// Format:
    /// `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
    #[prost(string, tag = "1")]
    pub index_endpoint: ::prost::alloc::string::String,
    /// Required. The DeployedIndex to be created within the IndexEndpoint.
    #[prost(message, optional, tag = "2")]
    pub deployed_index: ::core::option::Option<DeployedIndex>,
}
/// Response message for \[IndexEndpointService.DeployIndex][google.cloud.aiplatform.v1.IndexEndpointService.DeployIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployIndexResponse {
    /// The DeployedIndex that had been deployed in the IndexEndpoint.
    #[prost(message, optional, tag = "1")]
    pub deployed_index: ::core::option::Option<DeployedIndex>,
}
/// Runtime operation information for \[IndexEndpointService.DeployIndex][google.cloud.aiplatform.v1.IndexEndpointService.DeployIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeployIndexOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
    /// The unique index id specified by the user.
    #[prost(string, tag = "2")]
    pub deployed_index_id: ::prost::alloc::string::String,
}
/// Request message for \[IndexEndpointService.UndeployIndex][google.cloud.aiplatform.v1.IndexEndpointService.UndeployIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UndeployIndexRequest {
    /// Required. The name of the IndexEndpoint resource from which to undeploy an Index.
    /// Format:
    /// `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
    #[prost(string, tag = "1")]
    pub index_endpoint: ::prost::alloc::string::String,
    /// Required. The ID of the DeployedIndex to be undeployed from the IndexEndpoint.
    #[prost(string, tag = "2")]
    pub deployed_index_id: ::prost::alloc::string::String,
}
/// Response message for \[IndexEndpointService.UndeployIndex][google.cloud.aiplatform.v1.IndexEndpointService.UndeployIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UndeployIndexResponse {}
/// Runtime operation information for \[IndexEndpointService.UndeployIndex][google.cloud.aiplatform.v1.IndexEndpointService.UndeployIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UndeployIndexOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
#[doc = r" Generated client implementations."]
pub mod index_endpoint_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    #[doc = " A service for managing Vertex AI's IndexEndpoints."]
    #[derive(Debug, Clone)]
    pub struct IndexEndpointServiceClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl<T> IndexEndpointServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        #[doc = r" Wraps a raw gRPC service in a typed IndexEndpointService client."]
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        #[doc = r" Like `new`, but every request/response passes through `interceptor`."]
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> IndexEndpointServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            IndexEndpointServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Creates an IndexEndpoint."]
        pub async fn create_index_endpoint(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateIndexEndpointRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.IndexEndpointService/CreateIndexEndpoint",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets an IndexEndpoint."]
        pub async fn get_index_endpoint(
            &mut self,
            request: impl tonic::IntoRequest<super::GetIndexEndpointRequest>,
        ) -> Result<tonic::Response<super::IndexEndpoint>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.IndexEndpointService/GetIndexEndpoint",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists IndexEndpoints in a Location."]
        pub async fn list_index_endpoints(
            &mut self,
            request: impl tonic::IntoRequest<super::ListIndexEndpointsRequest>,
        ) -> Result<tonic::Response<super::ListIndexEndpointsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.IndexEndpointService/ListIndexEndpoints",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates an IndexEndpoint."]
        pub async fn update_index_endpoint(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateIndexEndpointRequest>,
        ) -> Result<tonic::Response<super::IndexEndpoint>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.IndexEndpointService/UpdateIndexEndpoint",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes an IndexEndpoint."]
        pub async fn delete_index_endpoint(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteIndexEndpointRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.IndexEndpointService/DeleteIndexEndpoint",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deploys an Index into this IndexEndpoint, creating a DeployedIndex within"]
        #[doc = " it."]
        #[doc = " Only non-empty Indexes can be deployed."]
        pub async fn deploy_index(
            &mut self,
            request: impl tonic::IntoRequest<super::DeployIndexRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.IndexEndpointService/DeployIndex",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Undeploys an Index from an IndexEndpoint, removing a DeployedIndex from it,"]
        #[doc = " and freeing all resources it's using."]
        pub async fn undeploy_index(
            &mut self,
            request: impl tonic::IntoRequest<super::UndeployIndexRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.IndexEndpointService/UndeployIndex",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// Request message for \[IndexService.CreateIndex][google.cloud.aiplatform.v1.IndexService.CreateIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateIndexRequest {
    /// Required. The resource name of the Location to create the Index in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Index to create.
    #[prost(message, optional, tag = "2")]
    pub index: ::core::option::Option<Index>,
}
/// Runtime operation information for \[IndexService.CreateIndex][google.cloud.aiplatform.v1.IndexService.CreateIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateIndexOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
    /// The operation metadata with regard to Matching Engine Index operation.
    #[prost(message, optional, tag = "2")]
    pub nearest_neighbor_search_operation_metadata:
        ::core::option::Option<NearestNeighborSearchOperationMetadata>,
}
/// Request message for \[IndexService.GetIndex][google.cloud.aiplatform.v1.IndexService.GetIndex\]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetIndexRequest {
    /// Required. The name of the Index resource.
    /// Format:
    /// `projects/{project}/locations/{location}/indexes/{index}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[IndexService.ListIndexes][google.cloud.aiplatform.v1.IndexService.ListIndexes\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListIndexesRequest {
    /// Required. The resource name of the Location from which to list the Indexes.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via
    /// \[ListIndexesResponse.next_page_token][google.cloud.aiplatform.v1.ListIndexesResponse.next_page_token\] of the previous
    /// \[IndexService.ListIndexes][google.cloud.aiplatform.v1.IndexService.ListIndexes\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[IndexService.ListIndexes][google.cloud.aiplatform.v1.IndexService.ListIndexes\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListIndexesResponse {
    /// List of indexes in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub indexes: ::prost::alloc::vec::Vec<Index>,
    /// A token to retrieve next page of results.
    /// Pass to \[ListIndexesRequest.page_token][google.cloud.aiplatform.v1.ListIndexesRequest.page_token\] to obtain that page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[IndexService.UpdateIndex][google.cloud.aiplatform.v1.IndexService.UpdateIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateIndexRequest {
    /// Required. The Index which updates the resource on the server.
    #[prost(message, optional, tag = "1")]
    pub index: ::core::option::Option<Index>,
    /// The update mask applies to the resource.
    /// For the `FieldMask` definition, see \[google.protobuf.FieldMask][google.protobuf.FieldMask\].
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Runtime operation information for \[IndexService.UpdateIndex][google.cloud.aiplatform.v1.IndexService.UpdateIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateIndexOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
    /// The operation metadata with regard to Matching Engine Index operation.
    #[prost(message, optional, tag = "2")]
    pub nearest_neighbor_search_operation_metadata:
        ::core::option::Option<NearestNeighborSearchOperationMetadata>,
}
/// Request message for \[IndexService.DeleteIndex][google.cloud.aiplatform.v1.IndexService.DeleteIndex\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteIndexRequest {
    /// Required. The name of the Index resource to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/indexes/{index}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Runtime operation metadata with regard to Matching Engine Index.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct NearestNeighborSearchOperationMetadata {
    /// The validation stats of the content (per file) to be inserted or
    /// updated on the Matching Engine Index resource. Populated if
    /// contentsDeltaUri is provided as part of \[Index.metadata][google.cloud.aiplatform.v1.Index.metadata\]. Please note
    /// that, currently for those files that are broken or have an unsupported
    /// file format, we will not have the stats for those files.
    #[prost(message, repeated, tag = "1")]
    pub content_validation_stats: ::prost::alloc::vec::Vec<
        nearest_neighbor_search_operation_metadata::ContentValidationStats,
    >,
    /// The ingested data size in bytes.
    #[prost(int64, tag = "2")]
    pub data_bytes_count: i64,
}
/// Nested message and enum types in `NearestNeighborSearchOperationMetadata`.
pub mod nearest_neighbor_search_operation_metadata {
    /// Details of a validation error encountered for a single record of the
    /// content being ingested into a Matching Engine Index.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct RecordError {
        /// The error type of this record.
        #[prost(enumeration = "record_error::RecordErrorType", tag = "1")]
        pub error_type: i32,
        /// A human-readable message that is shown to the user to help them fix the
        /// error. Note that this message may change from time to time, your code
        /// should check against error_type as the source of truth.
        #[prost(string, tag = "2")]
        pub error_message: ::prost::alloc::string::String,
        /// Cloud Storage URI pointing to the original file in user's bucket.
        #[prost(string, tag = "3")]
        pub source_gcs_uri: ::prost::alloc::string::String,
        /// Empty if the embedding id failed to parse.
        #[prost(string, tag = "4")]
        pub embedding_id: ::prost::alloc::string::String,
        /// The original content of this record.
        #[prost(string, tag = "5")]
        pub raw_record: ::prost::alloc::string::String,
    }
    /// Nested message and enum types in `RecordError`.
    pub mod record_error {
        /// The kind of validation failure for a single record.
        #[derive(
            Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
        )]
        #[repr(i32)]
        pub enum RecordErrorType {
            /// Default, shall not be used.
            ErrorTypeUnspecified = 0,
            /// The record is empty.
            EmptyLine = 1,
            /// Invalid json format.
            InvalidJsonSyntax = 2,
            /// Invalid csv format.
            InvalidCsvSyntax = 3,
            /// Invalid avro format.
            InvalidAvroSyntax = 4,
            /// The embedding id is not valid.
            InvalidEmbeddingId = 5,
            /// The size of the embedding vectors does not match with the specified
            /// dimension.
            EmbeddingSizeMismatch = 6,
            /// The `namespace` field is missing.
            NamespaceMissing = 7,
        }
    }
    /// Per-file validation statistics for content ingested into a Matching
    /// Engine Index.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct ContentValidationStats {
        /// Cloud Storage URI pointing to the original file in user's bucket.
        #[prost(string, tag = "1")]
        pub source_gcs_uri: ::prost::alloc::string::String,
        /// Number of records in this file that were successfully processed.
        #[prost(int64, tag = "2")]
        pub valid_record_count: i64,
        /// Number of records in this file we skipped due to validate errors.
        #[prost(int64, tag = "3")]
        pub invalid_record_count: i64,
        /// The detail information of the partial failures encountered for those
        /// invalid records that couldn't be parsed.
        /// Up to 50 partial errors will be reported.
        #[prost(message, repeated, tag = "4")]
        pub partial_errors: ::prost::alloc::vec::Vec<RecordError>,
    }
}
#[doc = r" Generated client implementations."]
pub mod index_service_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
#[doc = " A service for creating and managing Vertex AI's Index resources."]
#[derive(Debug, Clone)]
pub struct IndexServiceClient<T> {
inner: tonic::client::Grpc<T>,
}
impl<T> IndexServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> IndexServiceClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T: tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
Into<StdError> + Send + Sync,
{
IndexServiceClient::new(InterceptedService::new(inner, interceptor))
}
#[doc = r" Compress requests with `gzip`."]
#[doc = r""]
#[doc = r" This requires the server to support it otherwise it might respond with an"]
#[doc = r" error."]
pub fn send_gzip(mut self) -> Self {
self.inner = self.inner.send_gzip();
self
}
#[doc = r" Enable decompressing responses with `gzip`."]
pub fn accept_gzip(mut self) -> Self {
self.inner = self.inner.accept_gzip();
self
}
#[doc = " Creates an Index."]
pub async fn create_index(
&mut self,
request: impl tonic::IntoRequest<super::CreateIndexRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.IndexService/CreateIndex",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Gets an Index."]
pub async fn get_index(
&mut self,
request: impl tonic::IntoRequest<super::GetIndexRequest>,
) -> Result<tonic::Response<super::Index>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.IndexService/GetIndex",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Lists Indexes in a Location."]
pub async fn list_indexes(
&mut self,
request: impl tonic::IntoRequest<super::ListIndexesRequest>,
) -> Result<tonic::Response<super::ListIndexesResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.IndexService/ListIndexes",
);
self.inner.unary(request.into_request(), path, codec).await
}
/// Updates an Index.
pub async fn update_index(
    &mut self,
    request: impl tonic::IntoRequest<super::UpdateIndexRequest>,
) -> Result<
    tonic::Response<super::super::super::super::longrunning::Operation>,
    tonic::Status,
> {
    // Wait until the underlying channel can accept a new request.
    if let Err(e) = self.inner.ready().await {
        return Err(tonic::Status::new(
            tonic::Code::Unknown,
            format!("Service was not ready: {}", e.into()),
        ));
    }
    let path = http::uri::PathAndQuery::from_static(
        "/google.cloud.aiplatform.v1.IndexService/UpdateIndex",
    );
    let codec = tonic::codec::ProstCodec::default();
    // Returns a long-running Operation tracking the update.
    self.inner.unary(request.into_request(), path, codec).await
}
/// Deletes an Index.
/// An Index can only be deleted when all its
/// [DeployedIndexes][google.cloud.aiplatform.v1.Index.deployed_indexes] had been undeployed.
pub async fn delete_index(
    &mut self,
    request: impl tonic::IntoRequest<super::DeleteIndexRequest>,
) -> Result<
    tonic::Response<super::super::super::super::longrunning::Operation>,
    tonic::Status,
> {
    // Wait until the underlying channel can accept a new request.
    if let Err(e) = self.inner.ready().await {
        return Err(tonic::Status::new(
            tonic::Code::Unknown,
            format!("Service was not ready: {}", e.into()),
        ));
    }
    let path = http::uri::PathAndQuery::from_static(
        "/google.cloud.aiplatform.v1.IndexService/DeleteIndex",
    );
    let codec = tonic::codec::ProstCodec::default();
    // Returns a long-running Operation tracking the deletion.
    self.inner.unary(request.into_request(), path, codec).await
}
}
}
// NOTE(review): prost-generated messages — the #[prost(..., tag = "N")] attributes carry the
// .proto field numbers; tags must stay in sync with the proto definition for wire compatibility.
/// Request message for \[JobService.CreateCustomJob][google.cloud.aiplatform.v1.JobService.CreateCustomJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateCustomJobRequest {
    /// Required. The resource name of the Location to create the CustomJob in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The CustomJob to create.
    #[prost(message, optional, tag = "2")]
    pub custom_job: ::core::option::Option<CustomJob>,
}
/// Request message for \[JobService.GetCustomJob][google.cloud.aiplatform.v1.JobService.GetCustomJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetCustomJobRequest {
    /// Required. The name of the CustomJob resource.
    /// Format:
    /// `projects/{project}/locations/{location}/customJobs/{custom_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[JobService.ListCustomJobs][google.cloud.aiplatform.v1.JobService.ListCustomJobs\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListCustomJobsRequest {
    /// Required. The resource name of the Location to list the CustomJobs from.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    ///
    /// Supported fields:
    ///
    /// * `display_name` supports = and !=.
    ///
    /// * `state` supports = and !=.
    ///
    /// Some examples of using the filter are:
    ///
    /// * `state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`
    ///
    /// * `state="JOB_STATE_RUNNING" OR display_name="my_job"`
    ///
    /// * `NOT display_name="my_job"`
    ///
    /// * `state="JOB_STATE_FAILED"`
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via
    /// \[ListCustomJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListCustomJobsResponse.next_page_token\] of the previous
    /// \[JobService.ListCustomJobs][google.cloud.aiplatform.v1.JobService.ListCustomJobs\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[JobService.ListCustomJobs][google.cloud.aiplatform.v1.JobService.ListCustomJobs\]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListCustomJobsResponse {
    /// List of CustomJobs in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub custom_jobs: ::prost::alloc::vec::Vec<CustomJob>,
    /// A token to retrieve the next page of results.
    /// Pass to \[ListCustomJobsRequest.page_token][google.cloud.aiplatform.v1.ListCustomJobsRequest.page_token\] to obtain that page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[JobService.DeleteCustomJob][google.cloud.aiplatform.v1.JobService.DeleteCustomJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteCustomJobRequest {
    /// Required. The name of the CustomJob resource to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/customJobs/{custom_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[JobService.CancelCustomJob][google.cloud.aiplatform.v1.JobService.CancelCustomJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CancelCustomJobRequest {
    /// Required. The name of the CustomJob to cancel.
    /// Format:
    /// `projects/{project}/locations/{location}/customJobs/{custom_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
// NOTE(review): prost-generated messages for the DataLabelingJob RPCs; field tags mirror the
// .proto definition and must not be renumbered.
/// Request message for \[JobService.CreateDataLabelingJob][google.cloud.aiplatform.v1.JobService.CreateDataLabelingJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateDataLabelingJobRequest {
    /// Required. The parent of the DataLabelingJob.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The DataLabelingJob to create.
    #[prost(message, optional, tag = "2")]
    pub data_labeling_job: ::core::option::Option<DataLabelingJob>,
}
/// Request message for \[JobService.GetDataLabelingJob][google.cloud.aiplatform.v1.JobService.GetDataLabelingJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetDataLabelingJobRequest {
    /// Required. The name of the DataLabelingJob.
    /// Format:
    /// `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[JobService.ListDataLabelingJobs][google.cloud.aiplatform.v1.JobService.ListDataLabelingJobs\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListDataLabelingJobsRequest {
    /// Required. The parent of the DataLabelingJob.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    ///
    /// Supported fields:
    ///
    /// * `display_name` supports = and !=.
    ///
    /// * `state` supports = and !=.
    ///
    /// Some examples of using the filter are:
    ///
    /// * `state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`
    ///
    /// * `state="JOB_STATE_RUNNING" OR display_name="my_job"`
    ///
    /// * `NOT display_name="my_job"`
    ///
    /// * `state="JOB_STATE_FAILED"`
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read. FieldMask represents a set of
    /// symbolic field paths. For example, the mask can be `paths: "name"`. The
    /// "name" here is a field in DataLabelingJob.
    /// If this field is not set, all fields of the DataLabelingJob are returned.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// A comma-separated list of fields to order by, sorted in ascending order by
    /// default.
    /// Use `desc` after a field name for descending.
    #[prost(string, tag = "6")]
    pub order_by: ::prost::alloc::string::String,
}
/// Response message for \[JobService.ListDataLabelingJobs][google.cloud.aiplatform.v1.JobService.ListDataLabelingJobs\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListDataLabelingJobsResponse {
    /// A list of DataLabelingJobs that matches the specified filter in the
    /// request.
    #[prost(message, repeated, tag = "1")]
    pub data_labeling_jobs: ::prost::alloc::vec::Vec<DataLabelingJob>,
    /// The standard List next-page token.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[JobService.DeleteDataLabelingJob][google.cloud.aiplatform.v1.JobService.DeleteDataLabelingJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteDataLabelingJobRequest {
    /// Required. The name of the DataLabelingJob to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[JobService.CancelDataLabelingJob][google.cloud.aiplatform.v1.JobService.CancelDataLabelingJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CancelDataLabelingJobRequest {
    /// Required. The name of the DataLabelingJob.
    /// Format:
    /// `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
// NOTE(review): prost-generated messages for the HyperparameterTuningJob RPCs; field tags mirror
// the .proto definition and must not be renumbered.
/// Request message for \[JobService.CreateHyperparameterTuningJob][google.cloud.aiplatform.v1.JobService.CreateHyperparameterTuningJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateHyperparameterTuningJobRequest {
    /// Required. The resource name of the Location to create the HyperparameterTuningJob in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The HyperparameterTuningJob to create.
    #[prost(message, optional, tag = "2")]
    pub hyperparameter_tuning_job: ::core::option::Option<HyperparameterTuningJob>,
}
/// Request message for \[JobService.GetHyperparameterTuningJob][google.cloud.aiplatform.v1.JobService.GetHyperparameterTuningJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetHyperparameterTuningJobRequest {
    /// Required. The name of the HyperparameterTuningJob resource.
    /// Format:
    /// `projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[JobService.ListHyperparameterTuningJobs][google.cloud.aiplatform.v1.JobService.ListHyperparameterTuningJobs\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListHyperparameterTuningJobsRequest {
    /// Required. The resource name of the Location to list the HyperparameterTuningJobs
    /// from. Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    ///
    /// Supported fields:
    ///
    /// * `display_name` supports = and !=.
    ///
    /// * `state` supports = and !=.
    ///
    /// Some examples of using the filter are:
    ///
    /// * `state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`
    ///
    /// * `state="JOB_STATE_RUNNING" OR display_name="my_job"`
    ///
    /// * `NOT display_name="my_job"`
    ///
    /// * `state="JOB_STATE_FAILED"`
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via
    /// \[ListHyperparameterTuningJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListHyperparameterTuningJobsResponse.next_page_token\] of the previous
    /// \[JobService.ListHyperparameterTuningJobs][google.cloud.aiplatform.v1.JobService.ListHyperparameterTuningJobs\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[JobService.ListHyperparameterTuningJobs][google.cloud.aiplatform.v1.JobService.ListHyperparameterTuningJobs\]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListHyperparameterTuningJobsResponse {
    /// List of HyperparameterTuningJobs in the requested page.
    /// \[HyperparameterTuningJob.trials][google.cloud.aiplatform.v1.HyperparameterTuningJob.trials\] of the jobs will not be returned.
    #[prost(message, repeated, tag = "1")]
    pub hyperparameter_tuning_jobs: ::prost::alloc::vec::Vec<HyperparameterTuningJob>,
    /// A token to retrieve the next page of results.
    /// Pass to \[ListHyperparameterTuningJobsRequest.page_token][google.cloud.aiplatform.v1.ListHyperparameterTuningJobsRequest.page_token\] to obtain that
    /// page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[JobService.DeleteHyperparameterTuningJob][google.cloud.aiplatform.v1.JobService.DeleteHyperparameterTuningJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteHyperparameterTuningJobRequest {
    /// Required. The name of the HyperparameterTuningJob resource to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[JobService.CancelHyperparameterTuningJob][google.cloud.aiplatform.v1.JobService.CancelHyperparameterTuningJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CancelHyperparameterTuningJobRequest {
    /// Required. The name of the HyperparameterTuningJob to cancel.
    /// Format:
    /// `projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
// NOTE(review): prost-generated messages for the BatchPredictionJob RPCs; field tags mirror the
// .proto definition and must not be renumbered.
/// Request message for \[JobService.CreateBatchPredictionJob][google.cloud.aiplatform.v1.JobService.CreateBatchPredictionJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateBatchPredictionJobRequest {
    /// Required. The resource name of the Location to create the BatchPredictionJob in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The BatchPredictionJob to create.
    #[prost(message, optional, tag = "2")]
    pub batch_prediction_job: ::core::option::Option<BatchPredictionJob>,
}
/// Request message for \[JobService.GetBatchPredictionJob][google.cloud.aiplatform.v1.JobService.GetBatchPredictionJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetBatchPredictionJobRequest {
    /// Required. The name of the BatchPredictionJob resource.
    /// Format:
    /// `projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[JobService.ListBatchPredictionJobs][google.cloud.aiplatform.v1.JobService.ListBatchPredictionJobs\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListBatchPredictionJobsRequest {
    /// Required. The resource name of the Location to list the BatchPredictionJobs
    /// from. Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    ///
    /// Supported fields:
    ///
    /// * `display_name` supports = and !=.
    ///
    /// * `state` supports = and !=.
    ///
    /// * `model_display_name` supports = and !=.
    ///
    /// Some examples of using the filter are:
    ///
    /// * `state="JOB_STATE_SUCCEEDED" AND display_name="my_job"`
    ///
    /// * `state="JOB_STATE_RUNNING" OR display_name="my_job"`
    ///
    /// * `NOT display_name="my_job"`
    ///
    /// * `state="JOB_STATE_FAILED"`
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via
    /// \[ListBatchPredictionJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListBatchPredictionJobsResponse.next_page_token\] of the previous
    /// \[JobService.ListBatchPredictionJobs][google.cloud.aiplatform.v1.JobService.ListBatchPredictionJobs\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[JobService.ListBatchPredictionJobs][google.cloud.aiplatform.v1.JobService.ListBatchPredictionJobs\]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListBatchPredictionJobsResponse {
    /// List of BatchPredictionJobs in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub batch_prediction_jobs: ::prost::alloc::vec::Vec<BatchPredictionJob>,
    /// A token to retrieve the next page of results.
    /// Pass to \[ListBatchPredictionJobsRequest.page_token][google.cloud.aiplatform.v1.ListBatchPredictionJobsRequest.page_token\] to obtain that
    /// page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[JobService.DeleteBatchPredictionJob][google.cloud.aiplatform.v1.JobService.DeleteBatchPredictionJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteBatchPredictionJobRequest {
    /// Required. The name of the BatchPredictionJob resource to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[JobService.CancelBatchPredictionJob][google.cloud.aiplatform.v1.JobService.CancelBatchPredictionJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CancelBatchPredictionJobRequest {
    /// Required. The name of the BatchPredictionJob to cancel.
    /// Format:
    /// `projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
// NOTE(review): prost-generated messages for the ModelDeploymentMonitoring create/search RPCs;
// field tags mirror the .proto definition and must not be renumbered.
/// Request message for
/// \[JobService.CreateModelDeploymentMonitoringJob][google.cloud.aiplatform.v1.JobService.CreateModelDeploymentMonitoringJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateModelDeploymentMonitoringJobRequest {
    /// Required. The parent of the ModelDeploymentMonitoringJob.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The ModelDeploymentMonitoringJob to create
    #[prost(message, optional, tag = "2")]
    pub model_deployment_monitoring_job: ::core::option::Option<ModelDeploymentMonitoringJob>,
}
/// Request message for
/// \[JobService.SearchModelDeploymentMonitoringStatsAnomalies][google.cloud.aiplatform.v1.JobService.SearchModelDeploymentMonitoringStatsAnomalies\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchModelDeploymentMonitoringStatsAnomaliesRequest {
    /// Required. ModelDeploymentMonitoring Job resource name.
    /// Format:
    /// `projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}`
    #[prost(string, tag = "1")]
    pub model_deployment_monitoring_job: ::prost::alloc::string::String,
    /// Required. The DeployedModel ID of the
    /// \[google.cloud.aiplatform.master.ModelDeploymentMonitoringObjectiveConfig.deployed_model_id\].
    #[prost(string, tag = "2")]
    pub deployed_model_id: ::prost::alloc::string::String,
    /// The feature display name. If specified, only return the stats belonging to
    /// this feature. Format:
    /// \[ModelMonitoringStatsAnomalies.FeatureHistoricStatsAnomalies.feature_display_name][google.cloud.aiplatform.v1.ModelMonitoringStatsAnomalies.FeatureHistoricStatsAnomalies.feature_display_name\],
    /// example: "user_destination".
    #[prost(string, tag = "3")]
    pub feature_display_name: ::prost::alloc::string::String,
    /// Required. Objectives of the stats to retrieve.
    #[prost(message, repeated, tag = "4")]
    pub objectives: ::prost::alloc::vec::Vec<
        search_model_deployment_monitoring_stats_anomalies_request::StatsAnomaliesObjective,
    >,
    /// The standard list page size.
    #[prost(int32, tag = "5")]
    pub page_size: i32,
    /// A page token received from a previous
    /// \[JobService.SearchModelDeploymentMonitoringStatsAnomalies][google.cloud.aiplatform.v1.JobService.SearchModelDeploymentMonitoringStatsAnomalies\]
    /// call.
    #[prost(string, tag = "6")]
    pub page_token: ::prost::alloc::string::String,
    /// The earliest timestamp of stats being generated.
    /// If not set, indicates fetching stats till the earliest possible one.
    #[prost(message, optional, tag = "7")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The latest timestamp of stats being generated.
    /// If not set, indicates fetching stats till the latest possible one.
    #[prost(message, optional, tag = "8")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
}
/// Nested message and enum types in `SearchModelDeploymentMonitoringStatsAnomaliesRequest`.
pub mod search_model_deployment_monitoring_stats_anomalies_request {
    /// Stats requested for specific objective.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct StatsAnomaliesObjective {
        /// The monitoring objective, encoded as a
        /// `ModelDeploymentMonitoringObjectiveType` enum value.
        #[prost(enumeration = "super::ModelDeploymentMonitoringObjectiveType", tag = "1")]
        pub r#type: i32,
        /// If set, all attribution scores between
        /// \[SearchModelDeploymentMonitoringStatsAnomaliesRequest.start_time][google.cloud.aiplatform.v1.SearchModelDeploymentMonitoringStatsAnomaliesRequest.start_time\] and
        /// \[SearchModelDeploymentMonitoringStatsAnomaliesRequest.end_time][google.cloud.aiplatform.v1.SearchModelDeploymentMonitoringStatsAnomaliesRequest.end_time\] are
        /// fetched, and page token doesn't take effect in this case.
        /// Only used to retrieve attribution score for the top Features which has
        /// the highest attribution score in the latest monitoring run.
        #[prost(int32, tag = "4")]
        pub top_feature_count: i32,
    }
}
/// Response message for
/// \[JobService.SearchModelDeploymentMonitoringStatsAnomalies][google.cloud.aiplatform.v1.JobService.SearchModelDeploymentMonitoringStatsAnomalies\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchModelDeploymentMonitoringStatsAnomaliesResponse {
    /// Stats retrieved for requested objectives.
    /// There are at most 1000
    /// \[ModelMonitoringStatsAnomalies.FeatureHistoricStatsAnomalies.prediction_stats][google.cloud.aiplatform.v1.ModelMonitoringStatsAnomalies.FeatureHistoricStatsAnomalies.prediction_stats\]
    /// in the response.
    #[prost(message, repeated, tag = "1")]
    pub monitoring_stats: ::prost::alloc::vec::Vec<ModelMonitoringStatsAnomalies>,
    /// The page token that can be used by the next
    /// \[JobService.SearchModelDeploymentMonitoringStatsAnomalies][google.cloud.aiplatform.v1.JobService.SearchModelDeploymentMonitoringStatsAnomalies\]
    /// call.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
// NOTE(review): prost-generated messages for the ModelDeploymentMonitoring get/list/update/
// delete/pause/resume RPCs; field tags mirror the .proto definition and must not be renumbered.
/// Request message for
/// \[JobService.GetModelDeploymentMonitoringJob][google.cloud.aiplatform.v1.JobService.GetModelDeploymentMonitoringJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetModelDeploymentMonitoringJobRequest {
    /// Required. The resource name of the ModelDeploymentMonitoringJob.
    /// Format:
    /// `projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for
/// \[JobService.ListModelDeploymentMonitoringJobs][google.cloud.aiplatform.v1.JobService.ListModelDeploymentMonitoringJobs\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListModelDeploymentMonitoringJobsRequest {
    /// Required. The parent of the ModelDeploymentMonitoringJob.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for
/// \[JobService.ListModelDeploymentMonitoringJobs][google.cloud.aiplatform.v1.JobService.ListModelDeploymentMonitoringJobs\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListModelDeploymentMonitoringJobsResponse {
    /// A list of ModelDeploymentMonitoringJobs that matches the specified filter
    /// in the request.
    #[prost(message, repeated, tag = "1")]
    pub model_deployment_monitoring_jobs: ::prost::alloc::vec::Vec<ModelDeploymentMonitoringJob>,
    /// The standard List next-page token.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for
/// \[JobService.UpdateModelDeploymentMonitoringJob][google.cloud.aiplatform.v1.JobService.UpdateModelDeploymentMonitoringJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateModelDeploymentMonitoringJobRequest {
    /// Required. The model monitoring configuration which replaces the resource on the
    /// server.
    #[prost(message, optional, tag = "1")]
    pub model_deployment_monitoring_job: ::core::option::Option<ModelDeploymentMonitoringJob>,
    /// Required. The update mask is used to specify the fields to be overwritten in the
    /// ModelDeploymentMonitoringJob resource by the update.
    /// The fields specified in the update_mask are relative to the resource, not
    /// the full request. A field will be overwritten if it is in the mask. If the
    /// user does not provide a mask then only the non-empty fields present in the
    /// request will be overwritten. Set the update_mask to `*` to override all
    /// fields.
    /// For the objective config, the user can either provide the update mask for
    /// model_deployment_monitoring_objective_configs or any combination of its
    /// nested fields, such as:
    /// model_deployment_monitoring_objective_configs.objective_config.training_dataset.
    ///
    /// Updatable fields:
    ///
    /// * `display_name`
    /// * `model_deployment_monitoring_schedule_config`
    /// * `model_monitoring_alert_config`
    /// * `logging_sampling_strategy`
    /// * `labels`
    /// * `log_ttl`
    /// * `enable_monitoring_pipeline_logs`
    /// . and
    /// * `model_deployment_monitoring_objective_configs`
    /// . or
    /// * `model_deployment_monitoring_objective_configs.objective_config.training_dataset`
    /// * `model_deployment_monitoring_objective_configs.objective_config.training_prediction_skew_detection_config`
    /// * `model_deployment_monitoring_objective_configs.objective_config.prediction_drift_detection_config`
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for
/// \[JobService.DeleteModelDeploymentMonitoringJob][google.cloud.aiplatform.v1.JobService.DeleteModelDeploymentMonitoringJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteModelDeploymentMonitoringJobRequest {
    /// Required. The resource name of the model monitoring job to delete.
    /// Format:
    /// `projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for
/// \[JobService.PauseModelDeploymentMonitoringJob][google.cloud.aiplatform.v1.JobService.PauseModelDeploymentMonitoringJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PauseModelDeploymentMonitoringJobRequest {
    /// Required. The resource name of the ModelDeploymentMonitoringJob to pause.
    /// Format:
    /// `projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for
/// \[JobService.ResumeModelDeploymentMonitoringJob][google.cloud.aiplatform.v1.JobService.ResumeModelDeploymentMonitoringJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ResumeModelDeploymentMonitoringJobRequest {
    /// Required. The resource name of the ModelDeploymentMonitoringJob to resume.
    /// Format:
    /// `projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Runtime operation information for
/// \[JobService.UpdateModelDeploymentMonitoringJob][google.cloud.aiplatform.v1.JobService.UpdateModelDeploymentMonitoringJob\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateModelDeploymentMonitoringJobOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
#[doc = r" Generated client implementations."]
pub mod job_service_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
/// A service for creating and managing Vertex AI's jobs.
#[derive(Debug, Clone)]
pub struct JobServiceClient<T> {
    /// The underlying generic tonic gRPC client wrapping the transport `T`.
    inner: tonic::client::Grpc<T>,
}
impl<T> JobServiceClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
pub fn new(inner: T) -> Self {
    // Wrap the raw transport in tonic's generic gRPC client.
    Self {
        inner: tonic::client::Grpc::new(inner),
    }
}
pub fn with_interceptor<F>(
    inner: T,
    interceptor: F,
) -> JobServiceClient<InterceptedService<T, F>>
where
    F: tonic::service::Interceptor,
    T: tonic::codegen::Service<
        http::Request<tonic::body::BoxBody>,
        Response = http::Response<
            <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
        >,
    >,
    <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
        Into<StdError> + Send + Sync,
{
    // Layer the interceptor over the transport, then build a client on top of it.
    let intercepted = InterceptedService::new(inner, interceptor);
    JobServiceClient::new(intercepted)
}
#[doc = r" Compress requests with `gzip`."]
#[doc = r""]
#[doc = r" This requires the server to support it otherwise it might respond with an"]
#[doc = r" error."]
pub fn send_gzip(mut self) -> Self {
self.inner = self.inner.send_gzip();
self
}
#[doc = r" Enable decompressing responses with `gzip`."]
pub fn accept_gzip(mut self) -> Self {
self.inner = self.inner.accept_gzip();
self
}
/// Creates a CustomJob. A created CustomJob right away
/// will be attempted to be run.
pub async fn create_custom_job(
    &mut self,
    request: impl tonic::IntoRequest<super::CreateCustomJobRequest>,
) -> Result<tonic::Response<super::CustomJob>, tonic::Status> {
    // Wait until the underlying channel can accept a new request.
    if let Err(e) = self.inner.ready().await {
        return Err(tonic::Status::new(
            tonic::Code::Unknown,
            format!("Service was not ready: {}", e.into()),
        ));
    }
    let path = http::uri::PathAndQuery::from_static(
        "/google.cloud.aiplatform.v1.JobService/CreateCustomJob",
    );
    let codec = tonic::codec::ProstCodec::default();
    // Issue the unary RPC over the now-ready channel.
    self.inner.unary(request.into_request(), path, codec).await
}
/// Gets a CustomJob.
pub async fn get_custom_job(
    &mut self,
    request: impl tonic::IntoRequest<super::GetCustomJobRequest>,
) -> Result<tonic::Response<super::CustomJob>, tonic::Status> {
    // Wait until the underlying channel can accept a new request.
    if let Err(e) = self.inner.ready().await {
        return Err(tonic::Status::new(
            tonic::Code::Unknown,
            format!("Service was not ready: {}", e.into()),
        ));
    }
    let path = http::uri::PathAndQuery::from_static(
        "/google.cloud.aiplatform.v1.JobService/GetCustomJob",
    );
    let codec = tonic::codec::ProstCodec::default();
    // Issue the unary RPC over the now-ready channel.
    self.inner.unary(request.into_request(), path, codec).await
}
/// Lists CustomJobs in a Location.
pub async fn list_custom_jobs(
    &mut self,
    request: impl tonic::IntoRequest<super::ListCustomJobsRequest>,
) -> Result<tonic::Response<super::ListCustomJobsResponse>, tonic::Status> {
    // Wait until the underlying channel can accept a new request.
    if let Err(e) = self.inner.ready().await {
        return Err(tonic::Status::new(
            tonic::Code::Unknown,
            format!("Service was not ready: {}", e.into()),
        ));
    }
    let path = http::uri::PathAndQuery::from_static(
        "/google.cloud.aiplatform.v1.JobService/ListCustomJobs",
    );
    let codec = tonic::codec::ProstCodec::default();
    // Issue the unary RPC over the now-ready channel.
    self.inner.unary(request.into_request(), path, codec).await
}
/// Deletes a CustomJob.
pub async fn delete_custom_job(
    &mut self,
    request: impl tonic::IntoRequest<super::DeleteCustomJobRequest>,
) -> Result<
    tonic::Response<super::super::super::super::longrunning::Operation>,
    tonic::Status,
> {
    // Wait until the underlying channel can accept a new request.
    if let Err(e) = self.inner.ready().await {
        return Err(tonic::Status::new(
            tonic::Code::Unknown,
            format!("Service was not ready: {}", e.into()),
        ));
    }
    let path = http::uri::PathAndQuery::from_static(
        "/google.cloud.aiplatform.v1.JobService/DeleteCustomJob",
    );
    let codec = tonic::codec::ProstCodec::default();
    // Returns a long-running Operation tracking the deletion.
    self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Cancels a CustomJob."]
#[doc = " Starts asynchronous cancellation on the CustomJob. The server"]
#[doc = " makes a best effort to cancel the job, but success is not"]
#[doc = " guaranteed. Clients can use [JobService.GetCustomJob][google.cloud.aiplatform.v1.JobService.GetCustomJob] or"]
#[doc = " other methods to check whether the cancellation succeeded or whether the"]
#[doc = " job completed despite cancellation. On successful cancellation,"]
#[doc = " the CustomJob is not deleted; instead it becomes a job with"]
#[doc = " a [CustomJob.error][google.cloud.aiplatform.v1.CustomJob.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,"]
#[doc = " corresponding to `Code.CANCELLED`, and [CustomJob.state][google.cloud.aiplatform.v1.CustomJob.state] is set to"]
#[doc = " `CANCELLED`."]
pub async fn cancel_custom_job(
&mut self,
request: impl tonic::IntoRequest<super::CancelCustomJobRequest>,
) -> Result<tonic::Response<()>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/CancelCustomJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Creates a DataLabelingJob."]
pub async fn create_data_labeling_job(
&mut self,
request: impl tonic::IntoRequest<super::CreateDataLabelingJobRequest>,
) -> Result<tonic::Response<super::DataLabelingJob>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/CreateDataLabelingJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Gets a DataLabelingJob."]
pub async fn get_data_labeling_job(
&mut self,
request: impl tonic::IntoRequest<super::GetDataLabelingJobRequest>,
) -> Result<tonic::Response<super::DataLabelingJob>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/GetDataLabelingJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Lists DataLabelingJobs in a Location."]
pub async fn list_data_labeling_jobs(
&mut self,
request: impl tonic::IntoRequest<super::ListDataLabelingJobsRequest>,
) -> Result<tonic::Response<super::ListDataLabelingJobsResponse>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/ListDataLabelingJobs",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Deletes a DataLabelingJob."]
pub async fn delete_data_labeling_job(
&mut self,
request: impl tonic::IntoRequest<super::DeleteDataLabelingJobRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/DeleteDataLabelingJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Cancels a DataLabelingJob. Success of cancellation is not guaranteed."]
pub async fn cancel_data_labeling_job(
&mut self,
request: impl tonic::IntoRequest<super::CancelDataLabelingJobRequest>,
) -> Result<tonic::Response<()>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/CancelDataLabelingJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Creates a HyperparameterTuningJob"]
pub async fn create_hyperparameter_tuning_job(
&mut self,
request: impl tonic::IntoRequest<super::CreateHyperparameterTuningJobRequest>,
) -> Result<tonic::Response<super::HyperparameterTuningJob>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/CreateHyperparameterTuningJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Gets a HyperparameterTuningJob"]
pub async fn get_hyperparameter_tuning_job(
&mut self,
request: impl tonic::IntoRequest<super::GetHyperparameterTuningJobRequest>,
) -> Result<tonic::Response<super::HyperparameterTuningJob>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/GetHyperparameterTuningJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Lists HyperparameterTuningJobs in a Location."]
pub async fn list_hyperparameter_tuning_jobs(
&mut self,
request: impl tonic::IntoRequest<super::ListHyperparameterTuningJobsRequest>,
) -> Result<tonic::Response<super::ListHyperparameterTuningJobsResponse>, tonic::Status>
{
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/ListHyperparameterTuningJobs",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Deletes a HyperparameterTuningJob."]
pub async fn delete_hyperparameter_tuning_job(
&mut self,
request: impl tonic::IntoRequest<super::DeleteHyperparameterTuningJobRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/DeleteHyperparameterTuningJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Cancels a HyperparameterTuningJob."]
#[doc = " Starts asynchronous cancellation on the HyperparameterTuningJob. The server"]
#[doc = " makes a best effort to cancel the job, but success is not"]
#[doc = " guaranteed. Clients can use [JobService.GetHyperparameterTuningJob][google.cloud.aiplatform.v1.JobService.GetHyperparameterTuningJob] or"]
#[doc = " other methods to check whether the cancellation succeeded or whether the"]
#[doc = " job completed despite cancellation. On successful cancellation,"]
#[doc = " the HyperparameterTuningJob is not deleted; instead it becomes a job with"]
#[doc = " a [HyperparameterTuningJob.error][google.cloud.aiplatform.v1.HyperparameterTuningJob.error] value with a [google.rpc.Status.code][google.rpc.Status.code]"]
#[doc = " of 1, corresponding to `Code.CANCELLED`, and"]
#[doc = " [HyperparameterTuningJob.state][google.cloud.aiplatform.v1.HyperparameterTuningJob.state] is set to `CANCELLED`."]
pub async fn cancel_hyperparameter_tuning_job(
&mut self,
request: impl tonic::IntoRequest<super::CancelHyperparameterTuningJobRequest>,
) -> Result<tonic::Response<()>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/CancelHyperparameterTuningJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Creates a BatchPredictionJob. A BatchPredictionJob once created will"]
#[doc = " right away be attempted to start."]
pub async fn create_batch_prediction_job(
&mut self,
request: impl tonic::IntoRequest<super::CreateBatchPredictionJobRequest>,
) -> Result<tonic::Response<super::BatchPredictionJob>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/CreateBatchPredictionJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Gets a BatchPredictionJob"]
pub async fn get_batch_prediction_job(
&mut self,
request: impl tonic::IntoRequest<super::GetBatchPredictionJobRequest>,
) -> Result<tonic::Response<super::BatchPredictionJob>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/GetBatchPredictionJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Lists BatchPredictionJobs in a Location."]
pub async fn list_batch_prediction_jobs(
&mut self,
request: impl tonic::IntoRequest<super::ListBatchPredictionJobsRequest>,
) -> Result<tonic::Response<super::ListBatchPredictionJobsResponse>, tonic::Status>
{
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/ListBatchPredictionJobs",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Deletes a BatchPredictionJob. Can only be called on jobs that already"]
#[doc = " finished."]
pub async fn delete_batch_prediction_job(
&mut self,
request: impl tonic::IntoRequest<super::DeleteBatchPredictionJobRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/DeleteBatchPredictionJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Cancels a BatchPredictionJob."]
#[doc = ""]
#[doc = " Starts asynchronous cancellation on the BatchPredictionJob. The server"]
#[doc = " makes the best effort to cancel the job, but success is not"]
#[doc = " guaranteed. Clients can use [JobService.GetBatchPredictionJob][google.cloud.aiplatform.v1.JobService.GetBatchPredictionJob] or"]
#[doc = " other methods to check whether the cancellation succeeded or whether the"]
#[doc = " job completed despite cancellation. On a successful cancellation,"]
#[doc = " the BatchPredictionJob is not deleted;instead its"]
#[doc = " [BatchPredictionJob.state][google.cloud.aiplatform.v1.BatchPredictionJob.state] is set to `CANCELLED`. Any files already"]
#[doc = " outputted by the job are not deleted."]
pub async fn cancel_batch_prediction_job(
&mut self,
request: impl tonic::IntoRequest<super::CancelBatchPredictionJobRequest>,
) -> Result<tonic::Response<()>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/CancelBatchPredictionJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Creates a ModelDeploymentMonitoringJob. It will run periodically on a"]
#[doc = " configured interval."]
pub async fn create_model_deployment_monitoring_job(
&mut self,
request: impl tonic::IntoRequest<super::CreateModelDeploymentMonitoringJobRequest>,
) -> Result<tonic::Response<super::ModelDeploymentMonitoringJob>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/CreateModelDeploymentMonitoringJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Searches Model Monitoring Statistics generated within a given time window."]
pub async fn search_model_deployment_monitoring_stats_anomalies(
&mut self,
request: impl tonic::IntoRequest<
super::SearchModelDeploymentMonitoringStatsAnomaliesRequest,
>,
) -> Result<
tonic::Response<super::SearchModelDeploymentMonitoringStatsAnomaliesResponse>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http :: uri :: PathAndQuery :: from_static ("/google.cloud.aiplatform.v1.JobService/SearchModelDeploymentMonitoringStatsAnomalies") ;
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Gets a ModelDeploymentMonitoringJob."]
pub async fn get_model_deployment_monitoring_job(
&mut self,
request: impl tonic::IntoRequest<super::GetModelDeploymentMonitoringJobRequest>,
) -> Result<tonic::Response<super::ModelDeploymentMonitoringJob>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/GetModelDeploymentMonitoringJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Lists ModelDeploymentMonitoringJobs in a Location."]
pub async fn list_model_deployment_monitoring_jobs(
&mut self,
request: impl tonic::IntoRequest<super::ListModelDeploymentMonitoringJobsRequest>,
) -> Result<tonic::Response<super::ListModelDeploymentMonitoringJobsResponse>, tonic::Status>
{
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/ListModelDeploymentMonitoringJobs",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Updates a ModelDeploymentMonitoringJob."]
pub async fn update_model_deployment_monitoring_job(
&mut self,
request: impl tonic::IntoRequest<super::UpdateModelDeploymentMonitoringJobRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/UpdateModelDeploymentMonitoringJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Deletes a ModelDeploymentMonitoringJob."]
pub async fn delete_model_deployment_monitoring_job(
&mut self,
request: impl tonic::IntoRequest<super::DeleteModelDeploymentMonitoringJobRequest>,
) -> Result<
tonic::Response<super::super::super::super::longrunning::Operation>,
tonic::Status,
> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/DeleteModelDeploymentMonitoringJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Pauses a ModelDeploymentMonitoringJob. If the job is running, the server"]
#[doc = " makes a best effort to cancel the job. Will mark"]
#[doc = " [ModelDeploymentMonitoringJob.state][google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.state] to 'PAUSED'."]
pub async fn pause_model_deployment_monitoring_job(
&mut self,
request: impl tonic::IntoRequest<super::PauseModelDeploymentMonitoringJobRequest>,
) -> Result<tonic::Response<()>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/PauseModelDeploymentMonitoringJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Resumes a paused ModelDeploymentMonitoringJob. It will start to run from"]
#[doc = " next scheduled time. A deleted ModelDeploymentMonitoringJob can't be"]
#[doc = " resumed."]
pub async fn resume_model_deployment_monitoring_job(
&mut self,
request: impl tonic::IntoRequest<super::ResumeModelDeploymentMonitoringJobRequest>,
) -> Result<tonic::Response<()>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/google.cloud.aiplatform.v1.JobService/ResumeModelDeploymentMonitoringJob",
);
self.inner.unary(request.into_request(), path, codec).await
}
}
}
/// A subgraph of the overall lineage graph. Event edges connect Artifact and
/// Execution nodes.
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct LineageSubgraph {
    /// The Artifact nodes in the subgraph.
    #[prost(message, repeated, tag = "1")]
    pub artifacts: ::prost::alloc::vec::Vec<Artifact>,
    /// The Execution nodes in the subgraph.
    #[prost(message, repeated, tag = "2")]
    pub executions: ::prost::alloc::vec::Vec<Execution>,
    /// The Event edges between Artifacts and Executions in the subgraph.
    #[prost(message, repeated, tag = "3")]
    pub events: ::prost::alloc::vec::Vec<Event>,
}
/// Instance of a general MetadataSchema.
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MetadataSchema {
    /// Output only. The resource name of the MetadataSchema.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// The version of the MetadataSchema. The version's format must match
    /// the following regular expression: `^\[0-9]+[.][0-9]+[.][0-9\]+$`, which would
    /// allow to order/compare different versions. Example: 1.0.0, 1.0.1, etc.
    #[prost(string, tag = "2")]
    pub schema_version: ::prost::alloc::string::String,
    /// Required. The raw YAML string representation of the MetadataSchema. The combination
    /// of \[MetadataSchema.version\] and the schema name given by `title` in
    /// \[MetadataSchema.schema\] must be unique within a MetadataStore.
    ///
    /// The schema is defined as an OpenAPI 3.0.2
    /// [MetadataSchema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#schemaObject>)
    #[prost(string, tag = "3")]
    pub schema: ::prost::alloc::string::String,
    /// The type of the MetadataSchema. This is a property that identifies which
    /// metadata types will use the MetadataSchema.
    // Stored as i32 on the wire; decode via metadata_schema::MetadataSchemaType.
    #[prost(enumeration = "metadata_schema::MetadataSchemaType", tag = "4")]
    pub schema_type: i32,
    /// Output only. Timestamp when this MetadataSchema was created.
    #[prost(message, optional, tag = "5")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Description of the Metadata Schema
    #[prost(string, tag = "6")]
    pub description: ::prost::alloc::string::String,
}
/// Nested message and enum types in `MetadataSchema`.
pub mod metadata_schema {
    /// Describes the type of the MetadataSchema.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum MetadataSchemaType {
        /// Unspecified type for the MetadataSchema.
        Unspecified = 0,
        /// A type indicating that the MetadataSchema will be used by Artifacts.
        ArtifactType = 1,
        /// A type indicating that the MetadataSchema will be used by Executions.
        ExecutionType = 2,
        /// A type indicating that the MetadataSchema will be used by Contexts.
        ContextType = 3,
    }
}
/// Instance of a metadata store. Contains a set of metadata that can be
/// queried.
// prost-generated protobuf message; wire tags mirror the originating .proto.
// Tag "2" is absent here — presumably reserved/deprecated in the .proto; verify upstream.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MetadataStore {
    /// Output only. The resource name of the MetadataStore instance.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Output only. Timestamp when this MetadataStore was created.
    #[prost(message, optional, tag = "3")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this MetadataStore was last updated.
    #[prost(message, optional, tag = "4")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Customer-managed encryption key spec for a Metadata Store. If set, this
    /// Metadata Store and all sub-resources of this Metadata Store are secured
    /// using this key.
    #[prost(message, optional, tag = "5")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
    /// Description of the MetadataStore.
    #[prost(string, tag = "6")]
    pub description: ::prost::alloc::string::String,
    /// Output only. State information of the MetadataStore.
    #[prost(message, optional, tag = "7")]
    pub state: ::core::option::Option<metadata_store::MetadataStoreState>,
}
/// Nested message and enum types in `MetadataStore`.
pub mod metadata_store {
    /// Represents state information for a MetadataStore.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct MetadataStoreState {
        /// The disk utilization of the MetadataStore in bytes.
        #[prost(int64, tag = "1")]
        pub disk_utilization_bytes: i64,
    }
}
/// Request message for \[MetadataService.CreateMetadataStore][google.cloud.aiplatform.v1.MetadataService.CreateMetadataStore\].
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateMetadataStoreRequest {
    /// Required. The resource name of the Location where the MetadataStore should
    /// be created.
    /// Format: `projects/{project}/locations/{location}/`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The MetadataStore to create.
    #[prost(message, optional, tag = "2")]
    pub metadata_store: ::core::option::Option<MetadataStore>,
    /// The {metadatastore} portion of the resource name with the format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    /// If not provided, the MetadataStore's ID will be a UUID generated by the
    /// service.
    /// Must be 4-128 characters in length. Valid characters are `/\[a-z][0-9\]-/`.
    /// Must be unique across all MetadataStores in the parent Location.
    /// (Otherwise the request will fail with ALREADY_EXISTS, or PERMISSION_DENIED
    /// if the caller can't view the preexisting MetadataStore.)
    #[prost(string, tag = "3")]
    pub metadata_store_id: ::prost::alloc::string::String,
}
/// Details of operations that perform \[MetadataService.CreateMetadataStore][google.cloud.aiplatform.v1.MetadataService.CreateMetadataStore\].
// Attached to the long-running Operation returned by CreateMetadataStore.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateMetadataStoreOperationMetadata {
    /// Operation metadata for creating a MetadataStore.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[MetadataService.GetMetadataStore][google.cloud.aiplatform.v1.MetadataService.GetMetadataStore\].
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetMetadataStoreRequest {
    /// Required. The resource name of the MetadataStore to retrieve.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.ListMetadataStores][google.cloud.aiplatform.v1.MetadataService.ListMetadataStores\].
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListMetadataStoresRequest {
    /// Required. The Location whose MetadataStores should be listed.
    /// Format:
    /// `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The maximum number of Metadata Stores to return. The service may return
    /// fewer.
    /// Must be in range 1-1000, inclusive. Defaults to 100.
    // Proto default 0 means "unset" and lets the server apply its default.
    #[prost(int32, tag = "2")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[MetadataService.ListMetadataStores][google.cloud.aiplatform.v1.MetadataService.ListMetadataStores\] call. Provide this to retrieve the
    /// subsequent page.
    ///
    /// When paginating, all other provided parameters must match the call that
    /// provided the page token. (Otherwise the request will fail with
    /// INVALID_ARGUMENT error.)
    #[prost(string, tag = "3")]
    pub page_token: ::prost::alloc::string::String,
}
/// Response message for \[MetadataService.ListMetadataStores][google.cloud.aiplatform.v1.MetadataService.ListMetadataStores\].
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListMetadataStoresResponse {
    /// The MetadataStores found for the Location.
    #[prost(message, repeated, tag = "1")]
    pub metadata_stores: ::prost::alloc::vec::Vec<MetadataStore>,
    /// A token, which can be sent as
    /// \[ListMetadataStoresRequest.page_token][google.cloud.aiplatform.v1.ListMetadataStoresRequest.page_token\] to retrieve the next
    /// page. If this field is not populated, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.DeleteMetadataStore][google.cloud.aiplatform.v1.MetadataService.DeleteMetadataStore\].
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteMetadataStoreRequest {
    /// Required. The resource name of the MetadataStore to delete.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Deprecated: Field is no longer supported.
    // Kept only for wire compatibility; setting it has no documented effect.
    #[deprecated]
    #[prost(bool, tag = "2")]
    pub force: bool,
}
/// Details of operations that perform \[MetadataService.DeleteMetadataStore][google.cloud.aiplatform.v1.MetadataService.DeleteMetadataStore\].
// Attached to the long-running Operation returned by DeleteMetadataStore.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteMetadataStoreOperationMetadata {
    /// Operation metadata for deleting a MetadataStore.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[MetadataService.CreateArtifact][google.cloud.aiplatform.v1.MetadataService.CreateArtifact\].
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateArtifactRequest {
    /// Required. The resource name of the MetadataStore where the Artifact should
    /// be created.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Artifact to create.
    #[prost(message, optional, tag = "2")]
    pub artifact: ::core::option::Option<Artifact>,
    /// The {artifact} portion of the resource name with the format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/artifacts/{artifact}`
    /// If not provided, the Artifact's ID will be a UUID generated by the service.
    /// Must be 4-128 characters in length. Valid characters are `/\[a-z][0-9\]-/`.
    /// Must be unique across all Artifacts in the parent MetadataStore. (Otherwise
    /// the request will fail with ALREADY_EXISTS, or PERMISSION_DENIED if the
    /// caller can't view the preexisting Artifact.)
    #[prost(string, tag = "3")]
    pub artifact_id: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.GetArtifact][google.cloud.aiplatform.v1.MetadataService.GetArtifact\].
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetArtifactRequest {
    /// Required. The resource name of the Artifact to retrieve.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/artifacts/{artifact}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.ListArtifacts][google.cloud.aiplatform.v1.MetadataService.ListArtifacts\].
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListArtifactsRequest {
    /// Required. The MetadataStore whose Artifacts should be listed.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The maximum number of Artifacts to return. The service may return fewer.
    /// Must be in range 1-1000, inclusive. Defaults to 100.
    // Proto default 0 means "unset" and lets the server apply its default.
    #[prost(int32, tag = "2")]
    pub page_size: i32,
    /// A page token, received from a previous \[MetadataService.ListArtifacts][google.cloud.aiplatform.v1.MetadataService.ListArtifacts\]
    /// call. Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other provided parameters must match the call that
    /// provided the page token. (Otherwise the request will fail with
    /// INVALID_ARGUMENT error.)
    #[prost(string, tag = "3")]
    pub page_token: ::prost::alloc::string::String,
    /// Filter specifying the boolean condition for the Artifacts to satisfy in
    /// order to be part of the result set.
    /// The syntax to define filter query is based on <https://google.aip.dev/160.>
    /// The supported set of filters include the following:
    ///
    /// *  **Attribute filtering**:
    ///    For example: `display_name = "test"`.
    ///    Supported fields include: `name`, `display_name`, `uri`, `state`,
    ///    `schema_title`, `create_time`, and `update_time`.
    ///    Time fields, such as `create_time` and `update_time`, require values
    ///    specified in RFC-3339 format.
    ///    For example: `create_time = "2020-11-19T11:30:00-04:00"`
    /// *  **Metadata field**:
    ///    To filter on metadata fields use traversal operation as follows:
    ///    `metadata.<field_name>.<type_value>`.
    ///    For example: `metadata.field_1.number_value = 10.0`
    /// *  **Context based filtering**:
    ///    To filter Artifacts based on the contexts to which they belong, use the
    ///    function operator with the full resource name
    ///    `in_context(<context-name>)`.
    ///    For example:
    ///    `in_context("projects/<project_number>/locations/<location>/metadataStores/<metadatastore_name>/contexts/<context-id>")`
    ///
    /// Each of the above supported filter types can be combined together using
    /// logical operators (`AND` & `OR`).
    ///
    /// For example: `display_name = "test" AND metadata.field1.bool_value = true`.
    #[prost(string, tag = "4")]
    pub filter: ::prost::alloc::string::String,
}
/// Response message for \[MetadataService.ListArtifacts][google.cloud.aiplatform.v1.MetadataService.ListArtifacts\].
// prost-generated protobuf message; wire tags mirror the originating .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListArtifactsResponse {
    /// The Artifacts retrieved from the MetadataStore.
    #[prost(message, repeated, tag = "1")]
    pub artifacts: ::prost::alloc::vec::Vec<Artifact>,
    /// A token, which can be sent as \[ListArtifactsRequest.page_token][google.cloud.aiplatform.v1.ListArtifactsRequest.page_token\]
    /// to retrieve the next page.
    /// If this field is not populated, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.UpdateArtifact][google.cloud.aiplatform.v1.MetadataService.UpdateArtifact\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateArtifactRequest {
/// Required. The Artifact containing updates.
/// The Artifact's \[Artifact.name][google.cloud.aiplatform.v1.Artifact.name\] field is used to identify the Artifact to
/// be updated.
/// Format:
/// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/artifacts/{artifact}`
#[prost(message, optional, tag = "1")]
pub artifact: ::core::option::Option<Artifact>,
/// Required. A FieldMask indicating which fields should be updated.
/// Functionality of this field is not yet supported.
#[prost(message, optional, tag = "2")]
pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
/// If set to true, and the \[Artifact][google.cloud.aiplatform.v1.Artifact\] is not found, a new \[Artifact][google.cloud.aiplatform.v1.Artifact\] is
/// created.
#[prost(bool, tag = "3")]
pub allow_missing: bool,
}
/// Request message for \[MetadataService.DeleteArtifact][google.cloud.aiplatform.v1.MetadataService.DeleteArtifact\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteArtifactRequest {
/// Required. The resource name of the Artifact to delete.
/// Format:
/// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/artifacts/{artifact}`
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
/// Optional. The etag of the Artifact to delete.
/// If this is provided, it must match the server's etag. Otherwise, the
/// request will fail with a FAILED_PRECONDITION.
#[prost(string, tag = "2")]
pub etag: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.PurgeArtifacts][google.cloud.aiplatform.v1.MetadataService.PurgeArtifacts\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeArtifactsRequest {
/// Required. The metadata store to purge Artifacts from.
/// Format:
/// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
#[prost(string, tag = "1")]
pub parent: ::prost::alloc::string::String,
/// Required. A required filter matching the Artifacts to be purged.
/// E.g., `update_time <= 2020-11-19T11:30:00-04:00`.
#[prost(string, tag = "2")]
pub filter: ::prost::alloc::string::String,
/// Optional. Flag to indicate to actually perform the purge.
/// If `force` is set to false, the method will return a sample of
/// Artifact names that would be deleted.
#[prost(bool, tag = "3")]
pub force: bool,
}
/// Response message for \[MetadataService.PurgeArtifacts][google.cloud.aiplatform.v1.MetadataService.PurgeArtifacts\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeArtifactsResponse {
/// The number of Artifacts that this request deleted (or, if `force` is false,
/// the number of Artifacts that will be deleted). This can be an estimate.
#[prost(int64, tag = "1")]
pub purge_count: i64,
/// A sample of the Artifact names that will be deleted.
/// Only populated if `force` is set to false. The maximum number of samples is
/// 100 (it is possible to return fewer).
#[prost(string, repeated, tag = "2")]
pub purge_sample: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Details of operations that perform \[MetadataService.PurgeArtifacts][google.cloud.aiplatform.v1.MetadataService.PurgeArtifacts\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeArtifactsMetadata {
/// Operation metadata for purging Artifacts.
#[prost(message, optional, tag = "1")]
pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[MetadataService.CreateContext][google.cloud.aiplatform.v1.MetadataService.CreateContext\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateContextRequest {
/// Required. The resource name of the MetadataStore where the Context should be
/// created.
/// Format:
/// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
#[prost(string, tag = "1")]
pub parent: ::prost::alloc::string::String,
/// Required. The Context to create.
#[prost(message, optional, tag = "2")]
pub context: ::core::option::Option<Context>,
/// The {context} portion of the resource name with the format:
/// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`.
/// If not provided, the Context's ID will be a UUID generated by the service.
/// Must be 4-128 characters in length. Valid characters are `/\[a-z][0-9\]-/`.
/// Must be unique across all Contexts in the parent MetadataStore. (Otherwise
/// the request will fail with ALREADY_EXISTS, or PERMISSION_DENIED if the
/// caller can't view the preexisting Context.)
#[prost(string, tag = "3")]
pub context_id: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.GetContext][google.cloud.aiplatform.v1.MetadataService.GetContext\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetContextRequest {
/// Required. The resource name of the Context to retrieve.
/// Format:
/// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.ListContexts][google.cloud.aiplatform.v1.MetadataService.ListContexts\]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListContextsRequest {
/// Required. The MetadataStore whose Contexts should be listed.
/// Format:
/// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
#[prost(string, tag = "1")]
pub parent: ::prost::alloc::string::String,
/// The maximum number of Contexts to return. The service may return fewer.
/// Must be in range 1-1000, inclusive. Defaults to 100.
#[prost(int32, tag = "2")]
pub page_size: i32,
/// A page token, received from a previous \[MetadataService.ListContexts][google.cloud.aiplatform.v1.MetadataService.ListContexts\]
/// call. Provide this to retrieve the subsequent page.
///
/// When paginating, all other provided parameters must match the call that
/// provided the page token. (Otherwise the request will fail with
/// INVALID_ARGUMENT error.)
#[prost(string, tag = "3")]
pub page_token: ::prost::alloc::string::String,
/// Filter specifying the boolean condition for the Contexts to satisfy in
/// order to be part of the result set.
/// The syntax to define filter query is based on <https://google.aip.dev/160.>
/// Following are the supported set of filters:
///
/// * **Attribute filtering**:
/// For example: `display_name = "test"`.
/// Supported fields include: `name`, `display_name`, `schema_title`,
/// `create_time`, and `update_time`.
/// Time fields, such as `create_time` and `update_time`, require values
/// specified in RFC-3339 format.
/// For example: `create_time = "2020-11-19T11:30:00-04:00"`.
/// * **Metadata field**:
/// To filter on metadata fields use traversal operation as follows:
/// `metadata.<field_name>.<type_value>`.
/// For example: `metadata.field_1.number_value = 10.0`.
/// * **Parent Child filtering**:
/// To filter Contexts based on parent-child relationship use the HAS
/// operator as follows:
///
/// ```
/// parent_contexts:
/// "projects/<project_number>/locations/<location>/metadataStores/<metadatastore_name>/contexts/<context_id>"
/// child_contexts:
/// "projects/<project_number>/locations/<location>/metadataStores/<metadatastore_name>/contexts/<context_id>"
/// ```
///
/// Each of the above supported filters can be combined together using
/// logical operators (`AND` & `OR`).
///
/// For example: `display_name = "test" AND metadata.field1.bool_value = true`.
#[prost(string, tag = "4")]
pub filter: ::prost::alloc::string::String,
}
/// Response message for \[MetadataService.ListContexts][google.cloud.aiplatform.v1.MetadataService.ListContexts\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListContextsResponse {
    /// The Contexts retrieved from the MetadataStore.
    #[prost(message, repeated, tag = "1")]
    pub contexts: ::prost::alloc::vec::Vec<Context>,
    /// A token, which can be sent as \[ListContextsRequest.page_token][google.cloud.aiplatform.v1.ListContextsRequest.page_token\]
    /// to retrieve the next page.
    /// If this field is not populated, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.UpdateContext][google.cloud.aiplatform.v1.MetadataService.UpdateContext\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateContextRequest {
    /// Required. The Context containing updates.
    /// The Context's \[Context.name][google.cloud.aiplatform.v1.Context.name\] field is used to identify the Context to be
    /// updated.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`
    #[prost(message, optional, tag = "1")]
    pub context: ::core::option::Option<Context>,
    /// Required. A FieldMask indicating which fields should be updated.
    /// Functionality of this field is not yet supported.
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// If set to true, and the \[Context][google.cloud.aiplatform.v1.Context\] is not found, a new \[Context][google.cloud.aiplatform.v1.Context\] is
    /// created.
    #[prost(bool, tag = "3")]
    pub allow_missing: bool,
}
/// Request message for \[MetadataService.DeleteContext][google.cloud.aiplatform.v1.MetadataService.DeleteContext\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteContextRequest {
    /// Required. The resource name of the Context to delete.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// The force deletion semantics are still undefined.
    /// Users should not use this field.
    #[prost(bool, tag = "2")]
    pub force: bool,
    /// Optional. The etag of the Context to delete.
    /// If this is provided, it must match the server's etag. Otherwise, the
    /// request will fail with a FAILED_PRECONDITION.
    #[prost(string, tag = "3")]
    pub etag: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.PurgeContexts][google.cloud.aiplatform.v1.MetadataService.PurgeContexts\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeContextsRequest {
    /// Required. The metadata store to purge Contexts from.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. A required filter matching the Contexts to be purged.
    /// E.g., `update_time <= 2020-11-19T11:30:00-04:00`.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// Optional. Flag to indicate to actually perform the purge.
    /// If `force` is set to false, the method will return a sample of
    /// Context names that would be deleted.
    #[prost(bool, tag = "3")]
    pub force: bool,
}
/// Response message for \[MetadataService.PurgeContexts][google.cloud.aiplatform.v1.MetadataService.PurgeContexts\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeContextsResponse {
    /// The number of Contexts that this request deleted (or, if `force` is false,
    /// the number of Contexts that will be deleted). This can be an estimate.
    #[prost(int64, tag = "1")]
    pub purge_count: i64,
    /// A sample of the Context names that will be deleted.
    /// Only populated if `force` is set to false. The maximum number of samples is
    /// 100 (it is possible to return fewer).
    #[prost(string, repeated, tag = "2")]
    pub purge_sample: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Details of operations that perform \[MetadataService.PurgeContexts][google.cloud.aiplatform.v1.MetadataService.PurgeContexts\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeContextsMetadata {
    /// Operation metadata for purging Contexts.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[MetadataService.AddContextArtifactsAndExecutions][google.cloud.aiplatform.v1.MetadataService.AddContextArtifactsAndExecutions\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddContextArtifactsAndExecutionsRequest {
    /// Required. The resource name of the Context that the Artifacts and Executions
    /// belong to.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`
    #[prost(string, tag = "1")]
    pub context: ::prost::alloc::string::String,
    /// The resource names of the Artifacts to attribute to the Context.
    ///
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/artifacts/{artifact}`
    #[prost(string, repeated, tag = "2")]
    pub artifacts: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The resource names of the Executions to associate with the
    /// Context.
    ///
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
    #[prost(string, repeated, tag = "3")]
    pub executions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Response message for \[MetadataService.AddContextArtifactsAndExecutions][google.cloud.aiplatform.v1.MetadataService.AddContextArtifactsAndExecutions\].
/// (Intentionally contains no fields.)
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddContextArtifactsAndExecutionsResponse {}
/// Request message for \[MetadataService.AddContextChildren][google.cloud.aiplatform.v1.MetadataService.AddContextChildren\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddContextChildrenRequest {
    /// Required. The resource name of the parent Context.
    ///
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`
    #[prost(string, tag = "1")]
    pub context: ::prost::alloc::string::String,
    /// The resource names of the child Contexts.
    #[prost(string, repeated, tag = "2")]
    pub child_contexts: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Response message for \[MetadataService.AddContextChildren][google.cloud.aiplatform.v1.MetadataService.AddContextChildren\].
/// (Intentionally contains no fields.)
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddContextChildrenResponse {}
/// Request message for \[MetadataService.QueryContextLineageSubgraph][google.cloud.aiplatform.v1.MetadataService.QueryContextLineageSubgraph\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QueryContextLineageSubgraphRequest {
    /// Required. The resource name of the Context whose Artifacts and Executions
    /// should be retrieved as a LineageSubgraph.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`
    ///
    /// The request may error with FAILED_PRECONDITION if the number of Artifacts,
    /// the number of Executions, or the number of Events that would be returned
    /// for the Context exceeds 1000.
    #[prost(string, tag = "1")]
    pub context: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.CreateExecution][google.cloud.aiplatform.v1.MetadataService.CreateExecution\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateExecutionRequest {
    /// Required. The resource name of the MetadataStore where the Execution should
    /// be created.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Execution to create.
    #[prost(message, optional, tag = "2")]
    pub execution: ::core::option::Option<Execution>,
    /// The {execution} portion of the resource name with the format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
    /// If not provided, the Execution's ID will be a UUID generated by the
    /// service.
    /// Must be 4-128 characters in length. Valid characters are `/\[a-z][0-9\]-/`.
    /// Must be unique across all Executions in the parent MetadataStore.
    /// (Otherwise the request will fail with ALREADY_EXISTS, or PERMISSION_DENIED
    /// if the caller can't view the preexisting Execution.)
    #[prost(string, tag = "3")]
    pub execution_id: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.GetExecution][google.cloud.aiplatform.v1.MetadataService.GetExecution\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetExecutionRequest {
    /// Required. The resource name of the Execution to retrieve.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.ListExecutions][google.cloud.aiplatform.v1.MetadataService.ListExecutions\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListExecutionsRequest {
    /// Required. The MetadataStore whose Executions should be listed.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The maximum number of Executions to return. The service may return fewer.
    /// Must be in range 1-1000, inclusive. Defaults to 100.
    #[prost(int32, tag = "2")]
    pub page_size: i32,
    /// A page token, received from a previous \[MetadataService.ListExecutions][google.cloud.aiplatform.v1.MetadataService.ListExecutions\]
    /// call. Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other provided parameters must match the call that
    /// provided the page token. (Otherwise the request will fail with an
    /// INVALID_ARGUMENT error.)
    #[prost(string, tag = "3")]
    pub page_token: ::prost::alloc::string::String,
    /// Filter specifying the boolean condition for the Executions to satisfy in
    /// order to be part of the result set.
    /// The syntax to define filter query is based on <https://google.aip.dev/160>.
    /// Following are the supported set of filters:
    ///
    /// * **Attribute filtering**:
    ///   For example: `display_name = "test"`.
    ///   Supported fields include: `name`, `display_name`, `state`,
    ///   `schema_title`, `create_time`, and `update_time`.
    ///   Time fields, such as `create_time` and `update_time`, require values
    ///   specified in RFC-3339 format.
    ///   For example: `create_time = "2020-11-19T11:30:00-04:00"`.
    /// * **Metadata field**:
    ///   To filter on metadata fields use traversal operation as follows:
    ///   `metadata.<field_name>.<type_value>`
    ///   For example: `metadata.field_1.number_value = 10.0`
    /// * **Context based filtering**:
    ///   To filter Executions based on the contexts to which they belong use
    ///   the function operator with the full resource name:
    ///   `in_context(<context-name>)`.
    ///   For example:
    ///   `in_context("projects/<project_number>/locations/<location>/metadataStores/<metadatastore_name>/contexts/<context-id>")`
    ///
    /// Each of the above supported filters can be combined together using
    /// logical operators (`AND` & `OR`).
    /// For example: `display_name = "test" AND metadata.field1.bool_value = true`.
    #[prost(string, tag = "4")]
    pub filter: ::prost::alloc::string::String,
}
/// Response message for \[MetadataService.ListExecutions][google.cloud.aiplatform.v1.MetadataService.ListExecutions\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListExecutionsResponse {
    /// The Executions retrieved from the MetadataStore.
    #[prost(message, repeated, tag = "1")]
    pub executions: ::prost::alloc::vec::Vec<Execution>,
    /// A token, which can be sent as \[ListExecutionsRequest.page_token][google.cloud.aiplatform.v1.ListExecutionsRequest.page_token\]
    /// to retrieve the next page.
    /// If this field is not populated, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.UpdateExecution][google.cloud.aiplatform.v1.MetadataService.UpdateExecution\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateExecutionRequest {
    /// Required. The Execution containing updates.
    /// The Execution's \[Execution.name][google.cloud.aiplatform.v1.Execution.name\] field is used to identify the Execution
    /// to be updated.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
    #[prost(message, optional, tag = "1")]
    pub execution: ::core::option::Option<Execution>,
    /// Required. A FieldMask indicating which fields should be updated.
    /// Functionality of this field is not yet supported.
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// If set to true, and the \[Execution][google.cloud.aiplatform.v1.Execution\] is not found, a new \[Execution][google.cloud.aiplatform.v1.Execution\]
    /// is created.
    #[prost(bool, tag = "3")]
    pub allow_missing: bool,
}
/// Request message for \[MetadataService.DeleteExecution][google.cloud.aiplatform.v1.MetadataService.DeleteExecution\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteExecutionRequest {
    /// Required. The resource name of the Execution to delete.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Optional. The etag of the Execution to delete.
    /// If this is provided, it must match the server's etag. Otherwise, the
    /// request will fail with a FAILED_PRECONDITION.
    #[prost(string, tag = "2")]
    pub etag: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.PurgeExecutions][google.cloud.aiplatform.v1.MetadataService.PurgeExecutions\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeExecutionsRequest {
    /// Required. The metadata store to purge Executions from.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. A required filter matching the Executions to be purged.
    /// E.g., `update_time <= 2020-11-19T11:30:00-04:00`.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// Optional. Flag to indicate to actually perform the purge.
    /// If `force` is set to false, the method will return a sample of
    /// Execution names that would be deleted.
    #[prost(bool, tag = "3")]
    pub force: bool,
}
/// Response message for \[MetadataService.PurgeExecutions][google.cloud.aiplatform.v1.MetadataService.PurgeExecutions\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeExecutionsResponse {
    /// The number of Executions that this request deleted (or, if `force` is
    /// false, the number of Executions that will be deleted). This can be an
    /// estimate.
    #[prost(int64, tag = "1")]
    pub purge_count: i64,
    /// A sample of the Execution names that will be deleted.
    /// Only populated if `force` is set to false. The maximum number of samples is
    /// 100 (it is possible to return fewer).
    #[prost(string, repeated, tag = "2")]
    pub purge_sample: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Details of operations that perform \[MetadataService.PurgeExecutions][google.cloud.aiplatform.v1.MetadataService.PurgeExecutions\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PurgeExecutionsMetadata {
    /// Operation metadata for purging Executions.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[MetadataService.AddExecutionEvents][google.cloud.aiplatform.v1.MetadataService.AddExecutionEvents\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddExecutionEventsRequest {
    /// Required. The resource name of the Execution that the Events connect
    /// Artifacts with.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
    #[prost(string, tag = "1")]
    pub execution: ::prost::alloc::string::String,
    /// The Events to create and add.
    #[prost(message, repeated, tag = "2")]
    pub events: ::prost::alloc::vec::Vec<Event>,
}
/// Response message for \[MetadataService.AddExecutionEvents][google.cloud.aiplatform.v1.MetadataService.AddExecutionEvents\].
/// (Intentionally contains no fields.)
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddExecutionEventsResponse {}
/// Request message for \[MetadataService.QueryExecutionInputsAndOutputs][google.cloud.aiplatform.v1.MetadataService.QueryExecutionInputsAndOutputs\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QueryExecutionInputsAndOutputsRequest {
    /// Required. The resource name of the Execution whose input and output Artifacts should
    /// be retrieved as a LineageSubgraph.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
    #[prost(string, tag = "1")]
    pub execution: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.CreateMetadataSchema][google.cloud.aiplatform.v1.MetadataService.CreateMetadataSchema\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateMetadataSchemaRequest {
    /// Required. The resource name of the MetadataStore where the MetadataSchema should
    /// be created.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The MetadataSchema to create.
    #[prost(message, optional, tag = "2")]
    pub metadata_schema: ::core::option::Option<MetadataSchema>,
    /// The {metadata_schema} portion of the resource name with the format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/metadataSchemas/{metadataschema}`
    /// If not provided, the MetadataSchema's ID will be a UUID generated by the
    /// service.
    /// Must be 4-128 characters in length. Valid characters are `/\[a-z][0-9\]-/`.
    /// Must be unique across all MetadataSchemas in the parent Location.
    /// (Otherwise the request will fail with ALREADY_EXISTS, or PERMISSION_DENIED
    /// if the caller can't view the preexisting MetadataSchema.)
    #[prost(string, tag = "3")]
    pub metadata_schema_id: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.GetMetadataSchema][google.cloud.aiplatform.v1.MetadataService.GetMetadataSchema\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetMetadataSchemaRequest {
    /// Required. The resource name of the MetadataSchema to retrieve.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/metadataSchemas/{metadataschema}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListMetadataSchemasRequest {
    /// Required. The MetadataStore whose MetadataSchemas should be listed.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The maximum number of MetadataSchemas to return. The service may return
    /// fewer.
    /// Must be in range 1-1000, inclusive. Defaults to 100.
    #[prost(int32, tag = "2")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas\] call. Provide this to retrieve the
    /// next page.
    ///
    /// When paginating, all other provided parameters must match the call that
    /// provided the page token. (Otherwise the request will fail with
    /// INVALID_ARGUMENT error.)
    #[prost(string, tag = "3")]
    pub page_token: ::prost::alloc::string::String,
    /// A query to filter available MetadataSchemas for matching results.
    #[prost(string, tag = "4")]
    pub filter: ::prost::alloc::string::String,
}
/// Response message for \[MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListMetadataSchemasResponse {
    /// The MetadataSchemas found for the MetadataStore.
    #[prost(message, repeated, tag = "1")]
    pub metadata_schemas: ::prost::alloc::vec::Vec<MetadataSchema>,
    /// A token, which can be sent as
    /// \[ListMetadataSchemasRequest.page_token][google.cloud.aiplatform.v1.ListMetadataSchemasRequest.page_token\] to retrieve the next
    /// page. If this field is not populated, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[MetadataService.QueryArtifactLineageSubgraph][google.cloud.aiplatform.v1.MetadataService.QueryArtifactLineageSubgraph\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct QueryArtifactLineageSubgraphRequest {
    /// Required. The resource name of the Artifact whose Lineage needs to be retrieved as a
    /// LineageSubgraph.
    /// Format:
    /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/artifacts/{artifact}`
    ///
    /// The request may error with FAILED_PRECONDITION if the number of Artifacts,
    /// the number of Executions, or the number of Events that would be returned
    /// for the Context exceeds 1000.
    #[prost(string, tag = "1")]
    pub artifact: ::prost::alloc::string::String,
    /// Specifies the size of the lineage graph in terms of number of hops from the
    /// specified artifact.
    /// Negative Value: INVALID_ARGUMENT error is returned
    /// 0: Only input artifact is returned.
    /// No value: Transitive closure is performed to return the complete graph.
    #[prost(int32, tag = "2")]
    pub max_hops: i32,
    /// Filter specifying the boolean condition for the Artifacts to satisfy in
    /// order to be part of the Lineage Subgraph.
    /// The syntax to define filter query is based on <https://google.aip.dev/160>.
    /// The supported set of filters include the following:
    ///
    /// * **Attribute filtering**:
    ///   For example: `display_name = "test"`
    ///   Supported fields include: `name`, `display_name`, `uri`, `state`,
    ///   `schema_title`, `create_time`, and `update_time`.
    ///   Time fields, such as `create_time` and `update_time`, require values
    ///   specified in RFC-3339 format.
    ///   For example: `create_time = "2020-11-19T11:30:00-04:00"`
    /// * **Metadata field**:
    ///   To filter on metadata fields use traversal operation as follows:
    ///   `metadata.<field_name>.<type_value>`.
    ///   For example: `metadata.field_1.number_value = 10.0`
    ///
    /// Each of the above supported filter types can be combined together using
    /// logical operators (`AND` & `OR`).
    ///
    /// For example: `display_name = "test" AND metadata.field1.bool_value = true`.
    #[prost(string, tag = "3")]
    pub filter: ::prost::alloc::string::String,
}
#[doc = r" Generated client implementations."]
pub mod metadata_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    // NOTE(review): tonic/prost-generated gRPC client — do not hand-edit the RPC
    // bodies; regenerate from the .proto instead. Every RPC below follows the same
    // pattern: await channel readiness (readiness failures are surfaced as
    // `tonic::Code::Unknown`), build a Prost codec, and issue a unary call on a
    // static request path.
    #[doc = " Service for reading and writing metadata entries."]
    #[derive(Debug, Clone)]
    pub struct MetadataServiceClient<T> {
        // Generic gRPC channel wrapper; `T` is the underlying HTTP/2 transport service.
        inner: tonic::client::Grpc<T>,
    }
    impl<T> MetadataServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps an already-connected transport (e.g. a `tonic::transport::Channel`).
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Builds a client whose every request first passes through `interceptor`
        // (typically used to attach auth metadata).
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> MetadataServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            MetadataServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Initializes a MetadataStore, including allocation of resources."]
        pub async fn create_metadata_store(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateMetadataStoreRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/CreateMetadataStore",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Retrieves a specific MetadataStore."]
        pub async fn get_metadata_store(
            &mut self,
            request: impl tonic::IntoRequest<super::GetMetadataStoreRequest>,
        ) -> Result<tonic::Response<super::MetadataStore>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/GetMetadataStore",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists MetadataStores for a Location."]
        pub async fn list_metadata_stores(
            &mut self,
            request: impl tonic::IntoRequest<super::ListMetadataStoresRequest>,
        ) -> Result<tonic::Response<super::ListMetadataStoresResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/ListMetadataStores",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a single MetadataStore and all its child resources (Artifacts,"]
        #[doc = " Executions, and Contexts)."]
        pub async fn delete_metadata_store(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteMetadataStoreRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/DeleteMetadataStore",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates an Artifact associated with a MetadataStore."]
        pub async fn create_artifact(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateArtifactRequest>,
        ) -> Result<tonic::Response<super::Artifact>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/CreateArtifact",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Retrieves a specific Artifact."]
        pub async fn get_artifact(
            &mut self,
            request: impl tonic::IntoRequest<super::GetArtifactRequest>,
        ) -> Result<tonic::Response<super::Artifact>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/GetArtifact",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists Artifacts in the MetadataStore."]
        pub async fn list_artifacts(
            &mut self,
            request: impl tonic::IntoRequest<super::ListArtifactsRequest>,
        ) -> Result<tonic::Response<super::ListArtifactsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/ListArtifacts",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates a stored Artifact."]
        pub async fn update_artifact(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateArtifactRequest>,
        ) -> Result<tonic::Response<super::Artifact>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/UpdateArtifact",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes an Artifact."]
        pub async fn delete_artifact(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteArtifactRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/DeleteArtifact",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Purges Artifacts."]
        pub async fn purge_artifacts(
            &mut self,
            request: impl tonic::IntoRequest<super::PurgeArtifactsRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/PurgeArtifacts",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a Context associated with a MetadataStore."]
        pub async fn create_context(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateContextRequest>,
        ) -> Result<tonic::Response<super::Context>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/CreateContext",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Retrieves a specific Context."]
        pub async fn get_context(
            &mut self,
            request: impl tonic::IntoRequest<super::GetContextRequest>,
        ) -> Result<tonic::Response<super::Context>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/GetContext",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists Contexts on the MetadataStore."]
        pub async fn list_contexts(
            &mut self,
            request: impl tonic::IntoRequest<super::ListContextsRequest>,
        ) -> Result<tonic::Response<super::ListContextsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/ListContexts",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates a stored Context."]
        pub async fn update_context(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateContextRequest>,
        ) -> Result<tonic::Response<super::Context>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/UpdateContext",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a stored Context."]
        pub async fn delete_context(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteContextRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/DeleteContext",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Purges Contexts."]
        pub async fn purge_contexts(
            &mut self,
            request: impl tonic::IntoRequest<super::PurgeContextsRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/PurgeContexts",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Adds a set of Artifacts and Executions to a Context. If any of the"]
        #[doc = " Artifacts or Executions have already been added to a Context, they are"]
        #[doc = " simply skipped."]
        pub async fn add_context_artifacts_and_executions(
            &mut self,
            request: impl tonic::IntoRequest<super::AddContextArtifactsAndExecutionsRequest>,
        ) -> Result<tonic::Response<super::AddContextArtifactsAndExecutionsResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/AddContextArtifactsAndExecutions",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Adds a set of Contexts as children to a parent Context. If any of the"]
        #[doc = " child Contexts have already been added to the parent Context, they are"]
        #[doc = " simply skipped. If this call would create a cycle or cause any Context to"]
        #[doc = " have more than 10 parents, the request will fail with an INVALID_ARGUMENT"]
        #[doc = " error."]
        pub async fn add_context_children(
            &mut self,
            request: impl tonic::IntoRequest<super::AddContextChildrenRequest>,
        ) -> Result<tonic::Response<super::AddContextChildrenResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/AddContextChildren",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Retrieves Artifacts and Executions within the specified Context, connected"]
        #[doc = " by Event edges and returned as a LineageSubgraph."]
        pub async fn query_context_lineage_subgraph(
            &mut self,
            request: impl tonic::IntoRequest<super::QueryContextLineageSubgraphRequest>,
        ) -> Result<tonic::Response<super::LineageSubgraph>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/QueryContextLineageSubgraph",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates an Execution associated with a MetadataStore."]
        pub async fn create_execution(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateExecutionRequest>,
        ) -> Result<tonic::Response<super::Execution>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/CreateExecution",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Retrieves a specific Execution."]
        pub async fn get_execution(
            &mut self,
            request: impl tonic::IntoRequest<super::GetExecutionRequest>,
        ) -> Result<tonic::Response<super::Execution>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/GetExecution",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists Executions in the MetadataStore."]
        pub async fn list_executions(
            &mut self,
            request: impl tonic::IntoRequest<super::ListExecutionsRequest>,
        ) -> Result<tonic::Response<super::ListExecutionsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/ListExecutions",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates a stored Execution."]
        pub async fn update_execution(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateExecutionRequest>,
        ) -> Result<tonic::Response<super::Execution>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/UpdateExecution",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes an Execution."]
        pub async fn delete_execution(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteExecutionRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/DeleteExecution",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Purges Executions."]
        pub async fn purge_executions(
            &mut self,
            request: impl tonic::IntoRequest<super::PurgeExecutionsRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/PurgeExecutions",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Adds Events to the specified Execution. An Event indicates whether an"]
        #[doc = " Artifact was used as an input or output for an Execution. If an Event"]
        #[doc = " already exists between the Execution and the Artifact, the Event is"]
        #[doc = " skipped."]
        pub async fn add_execution_events(
            &mut self,
            request: impl tonic::IntoRequest<super::AddExecutionEventsRequest>,
        ) -> Result<tonic::Response<super::AddExecutionEventsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/AddExecutionEvents",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Obtains the set of input and output Artifacts for this Execution, in the"]
        #[doc = " form of LineageSubgraph that also contains the Execution and connecting"]
        #[doc = " Events."]
        pub async fn query_execution_inputs_and_outputs(
            &mut self,
            request: impl tonic::IntoRequest<super::QueryExecutionInputsAndOutputsRequest>,
        ) -> Result<tonic::Response<super::LineageSubgraph>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/QueryExecutionInputsAndOutputs",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a MetadataSchema."]
        pub async fn create_metadata_schema(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateMetadataSchemaRequest>,
        ) -> Result<tonic::Response<super::MetadataSchema>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/CreateMetadataSchema",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Retrieves a specific MetadataSchema."]
        pub async fn get_metadata_schema(
            &mut self,
            request: impl tonic::IntoRequest<super::GetMetadataSchemaRequest>,
        ) -> Result<tonic::Response<super::MetadataSchema>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/GetMetadataSchema",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists MetadataSchemas."]
        pub async fn list_metadata_schemas(
            &mut self,
            request: impl tonic::IntoRequest<super::ListMetadataSchemasRequest>,
        ) -> Result<tonic::Response<super::ListMetadataSchemasResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/ListMetadataSchemas",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Retrieves lineage of an Artifact represented through Artifacts and"]
        #[doc = " Executions connected by Event edges and returned as a LineageSubgraph."]
        pub async fn query_artifact_lineage_subgraph(
            &mut self,
            request: impl tonic::IntoRequest<super::QueryArtifactLineageSubgraphRequest>,
        ) -> Result<tonic::Response<super::LineageSubgraph>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MetadataService/QueryArtifactLineageSubgraph",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// Represents one resource that exists in automl.googleapis.com,
/// datalabeling.googleapis.com or ml.googleapis.com.
// NOTE(review): prost-generated message. Wire tags 1-4 are consumed by the
// `resource` oneof below; 5 and 6 carry the migration timestamps. Do not
// renumber tags by hand — regenerate from the .proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MigratableResource {
    /// Output only. Timestamp when the last migration attempt on this MigratableResource
    /// started. Will not be set if there's no migration attempt on this
    /// MigratableResource.
    #[prost(message, optional, tag = "5")]
    pub last_migrate_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this MigratableResource was last updated.
    #[prost(message, optional, tag = "6")]
    pub last_update_time: ::core::option::Option<::prost_types::Timestamp>,
    // Exactly one of the four source-system variants (see `migratable_resource::Resource`).
    #[prost(oneof = "migratable_resource::Resource", tags = "1, 2, 3, 4")]
    pub resource: ::core::option::Option<migratable_resource::Resource>,
}
/// Nested message and enum types in `MigratableResource`.
// NOTE(review): prost-generated submodule holding the per-source-system payloads
// of `MigratableResource.resource`. Field tags here are wire format — keep in
// sync with the .proto rather than editing by hand.
pub mod migratable_resource {
    /// Represents one model Version in ml.googleapis.com.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct MlEngineModelVersion {
        /// The ml.googleapis.com endpoint that this model Version currently lives
        /// in.
        /// Example values:
        ///
        /// * ml.googleapis.com
        /// * us-centrall-ml.googleapis.com
        /// * europe-west4-ml.googleapis.com
        /// * asia-east1-ml.googleapis.com
        #[prost(string, tag = "1")]
        pub endpoint: ::prost::alloc::string::String,
        /// Full resource name of ml engine model Version.
        /// Format: `projects/{project}/models/{model}/versions/{version}`.
        #[prost(string, tag = "2")]
        pub version: ::prost::alloc::string::String,
    }
    /// Represents one Model in automl.googleapis.com.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct AutomlModel {
        /// Full resource name of automl Model.
        /// Format:
        /// `projects/{project}/locations/{location}/models/{model}`.
        #[prost(string, tag = "1")]
        pub model: ::prost::alloc::string::String,
        /// The Model's display name in automl.googleapis.com.
        // Tag 3 (not 2) — tag 2 is reserved/unused in the source proto.
        #[prost(string, tag = "3")]
        pub model_display_name: ::prost::alloc::string::String,
    }
    /// Represents one Dataset in automl.googleapis.com.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct AutomlDataset {
        /// Full resource name of automl Dataset.
        /// Format:
        /// `projects/{project}/locations/{location}/datasets/{dataset}`.
        #[prost(string, tag = "1")]
        pub dataset: ::prost::alloc::string::String,
        /// The Dataset's display name in automl.googleapis.com.
        #[prost(string, tag = "4")]
        pub dataset_display_name: ::prost::alloc::string::String,
    }
    /// Represents one Dataset in datalabeling.googleapis.com.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct DataLabelingDataset {
        /// Full resource name of data labeling Dataset.
        /// Format:
        /// `projects/{project}/datasets/{dataset}`.
        #[prost(string, tag = "1")]
        pub dataset: ::prost::alloc::string::String,
        /// The Dataset's display name in datalabeling.googleapis.com.
        #[prost(string, tag = "4")]
        pub dataset_display_name: ::prost::alloc::string::String,
        /// The migratable AnnotatedDataset in datalabeling.googleapis.com belongs to
        /// the data labeling Dataset.
        #[prost(message, repeated, tag = "3")]
        pub data_labeling_annotated_datasets:
            ::prost::alloc::vec::Vec<data_labeling_dataset::DataLabelingAnnotatedDataset>,
    }
    /// Nested message and enum types in `DataLabelingDataset`.
    pub mod data_labeling_dataset {
        /// Represents one AnnotatedDataset in datalabeling.googleapis.com.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct DataLabelingAnnotatedDataset {
            /// Full resource name of data labeling AnnotatedDataset.
            /// Format:
            /// `projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}`.
            #[prost(string, tag = "1")]
            pub annotated_dataset: ::prost::alloc::string::String,
            /// The AnnotatedDataset's display name in datalabeling.googleapis.com.
            #[prost(string, tag = "3")]
            pub annotated_dataset_display_name: ::prost::alloc::string::String,
        }
    }
    // The oneof payload of `MigratableResource.resource`; tags mirror the
    // `tags = "1, 2, 3, 4"` declaration on the parent field.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Resource {
        /// Output only. Represents one Version in ml.googleapis.com.
        #[prost(message, tag = "1")]
        MlEngineModelVersion(MlEngineModelVersion),
        /// Output only. Represents one Model in automl.googleapis.com.
        #[prost(message, tag = "2")]
        AutomlModel(AutomlModel),
        /// Output only. Represents one Dataset in automl.googleapis.com.
        #[prost(message, tag = "3")]
        AutomlDataset(AutomlDataset),
        /// Output only. Represents one Dataset in datalabeling.googleapis.com.
        #[prost(message, tag = "4")]
        DataLabelingDataset(DataLabelingDataset),
    }
}
/// Request message for \[MigrationService.SearchMigratableResources][google.cloud.aiplatform.v1.MigrationService.SearchMigratableResources\].
// NOTE(review): prost-generated paginated-list request (parent + page_size +
// page_token + filter); tags are wire format, regenerate rather than edit.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchMigratableResourcesRequest {
    /// Required. The location that the migratable resources should be searched from.
    /// It's the Vertex AI location that the resources can be migrated to, not
    /// the resources' original location.
    /// Format:
    /// `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard page size.
    /// The default and maximum value is 100.
    #[prost(int32, tag = "2")]
    pub page_size: i32,
    /// The standard page token.
    #[prost(string, tag = "3")]
    pub page_token: ::prost::alloc::string::String,
    /// A filter for your search. You can use the following types of filters:
    ///
    /// * Resource type filters. The following strings filter for a specific type
    /// of \[MigratableResource][google.cloud.aiplatform.v1.MigratableResource\]:
    ///  * `ml_engine_model_version:*`
    ///  * `automl_model:*`
    ///  * `automl_dataset:*`
    ///  * `data_labeling_dataset:*`
    /// * "Migrated or not" filters. The following strings filter for resources
    /// that either have or have not already been migrated:
    ///  * `last_migrate_time:*` filters for migrated resources.
    ///  * `NOT last_migrate_time:*` filters for not yet migrated resources.
    #[prost(string, tag = "4")]
    pub filter: ::prost::alloc::string::String,
}
/// Response message for \[MigrationService.SearchMigratableResources][google.cloud.aiplatform.v1.MigrationService.SearchMigratableResources\].
// NOTE(review): prost-generated paginated-list response; an empty
// `next_page_token` signals the final page per the standard AIP-158 convention
// — TODO confirm against the service docs.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SearchMigratableResourcesResponse {
    /// All migratable resources that can be migrated to the
    /// location specified in the request.
    #[prost(message, repeated, tag = "1")]
    pub migratable_resources: ::prost::alloc::vec::Vec<MigratableResource>,
    /// The standard next-page token.
    /// The migratable_resources may not fill page_size in
    /// SearchMigratableResourcesRequest even when there are subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[MigrationService.BatchMigrateResources][google.cloud.aiplatform.v1.MigrationService.BatchMigrateResources\].
// NOTE(review): prost-generated batch request wrapper; the per-resource configs
// live in `MigrateResourceRequest`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchMigrateResourcesRequest {
    /// Required. The location of the migrated resource will live in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The request messages specifying the resources to migrate.
    /// They must be in the same location as the destination.
    /// Up to 50 resources can be migrated in one batch.
    #[prost(message, repeated, tag = "2")]
    pub migrate_resource_requests: ::prost::alloc::vec::Vec<MigrateResourceRequest>,
}
/// Config of migrating one resource from automl.googleapis.com,
/// datalabeling.googleapis.com and ml.googleapis.com to Vertex AI.
// NOTE(review): prost-generated; the message is a bare oneof — exactly one of
// the four migration configs in `migrate_resource_request::Request` is set.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MigrateResourceRequest {
    #[prost(oneof = "migrate_resource_request::Request", tags = "1, 2, 3, 4")]
    pub request: ::core::option::Option<migrate_resource_request::Request>,
}
/// Nested message and enum types in `MigrateResourceRequest`.
// NOTE(review): prost-generated submodule with one config message per legacy
// source system, plus the `Request` oneof selecting among them. Tags are wire
// format — regenerate from the .proto rather than editing.
pub mod migrate_resource_request {
    /// Config for migrating version in ml.googleapis.com to Vertex AI's Model.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct MigrateMlEngineModelVersionConfig {
        /// Required. The ml.googleapis.com endpoint that this model version should be migrated
        /// from.
        /// Example values:
        ///
        /// * ml.googleapis.com
        ///
        /// * us-centrall-ml.googleapis.com
        ///
        /// * europe-west4-ml.googleapis.com
        ///
        /// * asia-east1-ml.googleapis.com
        #[prost(string, tag = "1")]
        pub endpoint: ::prost::alloc::string::String,
        /// Required. Full resource name of ml engine model version.
        /// Format: `projects/{project}/models/{model}/versions/{version}`.
        #[prost(string, tag = "2")]
        pub model_version: ::prost::alloc::string::String,
        /// Required. Display name of the model in Vertex AI.
        /// System will pick a display name if unspecified.
        #[prost(string, tag = "3")]
        pub model_display_name: ::prost::alloc::string::String,
    }
    /// Config for migrating Model in automl.googleapis.com to Vertex AI's Model.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct MigrateAutomlModelConfig {
        /// Required. Full resource name of automl Model.
        /// Format:
        /// `projects/{project}/locations/{location}/models/{model}`.
        #[prost(string, tag = "1")]
        pub model: ::prost::alloc::string::String,
        /// Optional. Display name of the model in Vertex AI.
        /// System will pick a display name if unspecified.
        #[prost(string, tag = "2")]
        pub model_display_name: ::prost::alloc::string::String,
    }
    /// Config for migrating Dataset in automl.googleapis.com to Vertex AI's
    /// Dataset.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct MigrateAutomlDatasetConfig {
        /// Required. Full resource name of automl Dataset.
        /// Format:
        /// `projects/{project}/locations/{location}/datasets/{dataset}`.
        #[prost(string, tag = "1")]
        pub dataset: ::prost::alloc::string::String,
        /// Required. Display name of the Dataset in Vertex AI.
        /// System will pick a display name if unspecified.
        #[prost(string, tag = "2")]
        pub dataset_display_name: ::prost::alloc::string::String,
    }
    /// Config for migrating Dataset in datalabeling.googleapis.com to Vertex
    /// AI's Dataset.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct MigrateDataLabelingDatasetConfig {
        /// Required. Full resource name of data labeling Dataset.
        /// Format:
        /// `projects/{project}/datasets/{dataset}`.
        #[prost(string, tag = "1")]
        pub dataset: ::prost::alloc::string::String,
        /// Optional. Display name of the Dataset in Vertex AI.
        /// System will pick a display name if unspecified.
        #[prost(string, tag = "2")]
        pub dataset_display_name: ::prost::alloc::string::String,
        /// Optional. Configs for migrating AnnotatedDataset in datalabeling.googleapis.com to
        /// Vertex AI's SavedQuery. The specified AnnotatedDatasets have to belong
        /// to the datalabeling Dataset.
        #[prost(message, repeated, tag = "3")]
        pub migrate_data_labeling_annotated_dataset_configs: ::prost::alloc::vec::Vec<
            migrate_data_labeling_dataset_config::MigrateDataLabelingAnnotatedDatasetConfig,
        >,
    }
    /// Nested message and enum types in `MigrateDataLabelingDatasetConfig`.
    pub mod migrate_data_labeling_dataset_config {
        /// Config for migrating AnnotatedDataset in datalabeling.googleapis.com to
        /// Vertex AI's SavedQuery.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct MigrateDataLabelingAnnotatedDatasetConfig {
            /// Required. Full resource name of data labeling AnnotatedDataset.
            /// Format:
            /// `projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}`.
            #[prost(string, tag = "1")]
            pub annotated_dataset: ::prost::alloc::string::String,
        }
    }
    // The oneof payload of `MigrateResourceRequest.request`; tags mirror the
    // `tags = "1, 2, 3, 4"` declaration on the parent field.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Request {
        /// Config for migrating Version in ml.googleapis.com to Vertex AI's Model.
        #[prost(message, tag = "1")]
        MigrateMlEngineModelVersionConfig(MigrateMlEngineModelVersionConfig),
        /// Config for migrating Model in automl.googleapis.com to Vertex AI's
        /// Model.
        #[prost(message, tag = "2")]
        MigrateAutomlModelConfig(MigrateAutomlModelConfig),
        /// Config for migrating Dataset in automl.googleapis.com to Vertex AI's
        /// Dataset.
        #[prost(message, tag = "3")]
        MigrateAutomlDatasetConfig(MigrateAutomlDatasetConfig),
        /// Config for migrating Dataset in datalabeling.googleapis.com to
        /// Vertex AI's Dataset.
        #[prost(message, tag = "4")]
        MigrateDataLabelingDatasetConfig(MigrateDataLabelingDatasetConfig),
    }
}
/// Response message for \[MigrationService.BatchMigrateResources][google.cloud.aiplatform.v1.MigrationService.BatchMigrateResources\].
// NOTE(review): prost-generated; one `MigrateResourceResponse` per successfully
// migrated input request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchMigrateResourcesResponse {
    /// Successfully migrated resources.
    #[prost(message, repeated, tag = "1")]
    pub migrate_resource_responses: ::prost::alloc::vec::Vec<MigrateResourceResponse>,
}
/// Describes a successfully migrated resource.
// NOTE(review): prost-generated; pairs the legacy source identifier (tag 3)
// with the post-migration Vertex AI name (oneof over tags 1 and 2).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MigrateResourceResponse {
    /// Before migration, the identifier in ml.googleapis.com,
    /// automl.googleapis.com or datalabeling.googleapis.com.
    #[prost(message, optional, tag = "3")]
    pub migratable_resource: ::core::option::Option<MigratableResource>,
    /// After migration, the resource name in Vertex AI.
    #[prost(oneof = "migrate_resource_response::MigratedResource", tags = "1, 2")]
    pub migrated_resource: ::core::option::Option<migrate_resource_response::MigratedResource>,
}
/// Nested message and enum types in `MigrateResourceResponse`.
// NOTE(review): prost-generated; tags mirror the .proto oneof fields.
pub mod migrate_resource_response {
    /// After migration, the resource name in Vertex AI.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum MigratedResource {
        /// Migrated Dataset's resource name.
        #[prost(string, tag = "1")]
        Dataset(::prost::alloc::string::String),
        /// Migrated Model's resource name.
        #[prost(string, tag = "2")]
        Model(::prost::alloc::string::String),
    }
}
/// Runtime operation information for \[MigrationService.BatchMigrateResources][google.cloud.aiplatform.v1.MigrationService.BatchMigrateResources\].
// NOTE(review): prost-generated; carried in the long-running Operation's
// metadata field returned by batch_migrate_resources.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchMigrateResourcesOperationMetadata {
    /// The common part of the operation metadata.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
    /// Partial results that reflect the latest migration operation progress.
    #[prost(message, repeated, tag = "2")]
    pub partial_results:
        ::prost::alloc::vec::Vec<batch_migrate_resources_operation_metadata::PartialResult>,
}
/// Nested message and enum types in `BatchMigrateResourcesOperationMetadata`.
// NOTE(review): prost-generated module; regenerate from the .proto to change.
pub mod batch_migrate_resources_operation_metadata {
    /// Represents a partial result in batch migration operation for one
    /// \[MigrateResourceRequest][google.cloud.aiplatform.v1.MigrateResourceRequest\].
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct PartialResult {
        /// It's the same as the value in
        /// \[MigrateResourceRequest.migrate_resource_requests][\].
        #[prost(message, optional, tag = "1")]
        pub request: ::core::option::Option<super::MigrateResourceRequest>,
        /// If the resource's migration is ongoing, none of the result will be set.
        /// If the resource's migration is finished, either error or one of the
        /// migrated resource name will be filled.
        #[prost(oneof = "partial_result::Result", tags = "2, 3, 4")]
        pub result: ::core::option::Option<partial_result::Result>,
    }
    /// Nested message and enum types in `PartialResult`.
    pub mod partial_result {
        /// If the resource's migration is ongoing, none of the result will be set.
        /// If the resource's migration is finished, either error or one of the
        /// migrated resource name will be filled.
        #[derive(Clone, PartialEq, ::prost::Oneof)]
        pub enum Result {
            /// The error result of the migration request in case of failure.
            // `super::...::rpc::Status` climbs out of the nested modules to
            // the shared google.rpc.Status type.
            #[prost(message, tag = "2")]
            Error(super::super::super::super::super::rpc::Status),
            /// Migrated model resource name.
            #[prost(string, tag = "3")]
            Model(::prost::alloc::string::String),
            /// Migrated dataset resource name.
            #[prost(string, tag = "4")]
            Dataset(::prost::alloc::string::String),
        }
    }
}
#[doc = r" Generated client implementations."]
// NOTE(review): tonic-build generated client — regenerate from the .proto
// rather than hand-editing. Each RPC method follows the same pattern:
// wait for transport readiness, then issue a unary gRPC call on a static path.
pub mod migration_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    #[doc = " A service that migrates resources from automl.googleapis.com,"]
    #[doc = " datalabeling.googleapis.com and ml.googleapis.com to Vertex AI."]
    #[derive(Debug, Clone)]
    pub struct MigrationServiceClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl<T> MigrationServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps an already-built transport/service in the gRPC client.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Builds a client whose requests pass through `interceptor` first
        // (e.g. for auth headers).
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> MigrationServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            MigrationServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Searches all of the resources in automl.googleapis.com,"]
        #[doc = " datalabeling.googleapis.com and ml.googleapis.com that can be migrated to"]
        #[doc = " Vertex AI's given location."]
        pub async fn search_migratable_resources(
            &mut self,
            request: impl tonic::IntoRequest<super::SearchMigratableResourcesRequest>,
        ) -> Result<tonic::Response<super::SearchMigratableResourcesResponse>, tonic::Status>
        {
            // Wait until the underlying service is ready; readiness failures
            // surface to the caller as `Code::Unknown`.
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MigrationService/SearchMigratableResources",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Batch migrates resources from ml.googleapis.com, automl.googleapis.com,"]
        #[doc = " and datalabeling.googleapis.com to Vertex AI."]
        // Returns a google.longrunning.Operation; poll it for completion.
        pub async fn batch_migrate_resources(
            &mut self,
            request: impl tonic::IntoRequest<super::BatchMigrateResourcesRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.MigrationService/BatchMigrateResources",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// A collection of metrics calculated by comparing Model's predictions on all of
/// the test data against annotations from the test data.
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelEvaluation {
    /// Output only. The resource name of the ModelEvaluation.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Output only. Points to a YAML file stored on Google Cloud Storage describing the
    /// \[metrics][google.cloud.aiplatform.v1.ModelEvaluation.metrics\] of this ModelEvaluation. The schema is
    /// defined as an OpenAPI 3.0.2 [Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    #[prost(string, tag = "2")]
    pub metrics_schema_uri: ::prost::alloc::string::String,
    /// Output only. Evaluation metrics of the Model. The schema of the metrics is stored in
    /// \[metrics_schema_uri][google.cloud.aiplatform.v1.ModelEvaluation.metrics_schema_uri\]
    // Schemaless google.protobuf.Value; interpret using metrics_schema_uri.
    #[prost(message, optional, tag = "3")]
    pub metrics: ::core::option::Option<::prost_types::Value>,
    /// Output only. Timestamp when this ModelEvaluation was created.
    #[prost(message, optional, tag = "4")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. All possible \[dimensions][ModelEvaluationSlice.slice.dimension\] of
    /// ModelEvaluationSlices. The dimensions can be used as the filter of the
    /// \[ModelService.ListModelEvaluationSlices][google.cloud.aiplatform.v1.ModelService.ListModelEvaluationSlices\] request, in the form of
    /// `slice.dimension = <dimension>`.
    #[prost(string, repeated, tag = "5")]
    pub slice_dimensions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// Output only. Aggregated explanation metrics for the Model's prediction output over the
    /// data this ModelEvaluation uses. This field is populated only if the Model
    /// is evaluated with explanations, and only for AutoML tabular Models.
    ///
    #[prost(message, optional, tag = "8")]
    pub model_explanation: ::core::option::Option<ModelExplanation>,
}
/// A collection of metrics calculated by comparing Model's predictions on a
/// slice of the test data against ground truth annotations.
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelEvaluationSlice {
    /// Output only. The resource name of the ModelEvaluationSlice.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Output only. The slice of the test data that is used to evaluate the Model.
    #[prost(message, optional, tag = "2")]
    pub slice: ::core::option::Option<model_evaluation_slice::Slice>,
    /// Output only. Points to a YAML file stored on Google Cloud Storage describing the
    /// \[metrics][google.cloud.aiplatform.v1.ModelEvaluationSlice.metrics\] of this ModelEvaluationSlice. The
    /// schema is defined as an OpenAPI 3.0.2 [Schema
    /// Object](<https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>).
    #[prost(string, tag = "3")]
    pub metrics_schema_uri: ::prost::alloc::string::String,
    /// Output only. Sliced evaluation metrics of the Model. The schema of the metrics is stored
    /// in \[metrics_schema_uri][google.cloud.aiplatform.v1.ModelEvaluationSlice.metrics_schema_uri\]
    #[prost(message, optional, tag = "4")]
    pub metrics: ::core::option::Option<::prost_types::Value>,
    /// Output only. Timestamp when this ModelEvaluationSlice was created.
    #[prost(message, optional, tag = "5")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
}
/// Nested message and enum types in `ModelEvaluationSlice`.
// NOTE(review): prost-generated module; regenerate from the .proto to change.
pub mod model_evaluation_slice {
    /// Definition of a slice.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Slice {
        /// Output only. The dimension of the slice.
        /// Well-known dimensions are:
        /// * `annotationSpec`: This slice is on the test data that has either
        /// ground truth or prediction with \[AnnotationSpec.display_name][google.cloud.aiplatform.v1.AnnotationSpec.display_name\]
        /// equals to \[value][google.cloud.aiplatform.v1.ModelEvaluationSlice.Slice.value\].
        #[prost(string, tag = "1")]
        pub dimension: ::prost::alloc::string::String,
        /// Output only. The value of the dimension in this slice.
        #[prost(string, tag = "2")]
        pub value: ::prost::alloc::string::String,
    }
}
/// Request message for \[ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UploadModelRequest {
    /// Required. The resource name of the Location into which to upload the Model.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Model to create.
    #[prost(message, optional, tag = "2")]
    pub model: ::core::option::Option<Model>,
}
/// Details of \[ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel\] operation.
// NOTE(review): prost-generated; metadata payload of the UploadModel LRO.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UploadModelOperationMetadata {
    /// The common part of the operation metadata.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Response message of \[ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel\] operation.
// NOTE(review): prost-generated; response payload of the UploadModel LRO.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UploadModelResponse {
    /// The name of the uploaded Model resource.
    /// Format: `projects/{project}/locations/{location}/models/{model}`
    #[prost(string, tag = "1")]
    pub model: ::prost::alloc::string::String,
}
/// Request message for \[ModelService.GetModel][google.cloud.aiplatform.v1.ModelService.GetModel\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetModelRequest {
    /// Required. The name of the Model resource.
    /// Format: `projects/{project}/locations/{location}/models/{model}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[ModelService.ListModels][google.cloud.aiplatform.v1.ModelService.ListModels\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListModelsRequest {
    /// Required. The resource name of the Location to list the Models from.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// An expression for filtering the results of the request. For field names
    /// both snake_case and camelCase are supported.
    ///
    /// * `model` supports = and !=. `model` represents the Model ID,
    /// i.e. the last segment of the Model's [resource name]\[google.cloud.aiplatform.v1.Model.name\].
    /// * `display_name` supports = and !=
    /// * `labels` supports general map functions that is:
    /// * `labels.key=value` - key:value equality
    /// * `labels.key:*` or `labels:key` - key existence
    /// * A key including a space must be quoted. `labels."a key"`.
    ///
    /// Some examples:
    /// * `model=1234`
    /// * `displayName="myDisplayName"`
    /// * `labels.myKey="myValue"`
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via
    /// \[ListModelsResponse.next_page_token][google.cloud.aiplatform.v1.ListModelsResponse.next_page_token\] of the previous
    /// \[ModelService.ListModels][google.cloud.aiplatform.v1.ModelService.ListModels\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// A comma-separated list of fields to order by, sorted in ascending order.
    /// Use "desc" after a field name for descending.
    /// Supported fields:
    /// * `display_name`
    /// * `create_time`
    /// * `update_time`
    ///
    /// Example: `display_name, create_time desc`.
    #[prost(string, tag = "6")]
    pub order_by: ::prost::alloc::string::String,
}
/// Response message for \[ModelService.ListModels][google.cloud.aiplatform.v1.ModelService.ListModels\]
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListModelsResponse {
    /// List of Models in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub models: ::prost::alloc::vec::Vec<Model>,
    /// A token to retrieve next page of results.
    /// Pass to \[ListModelsRequest.page_token][google.cloud.aiplatform.v1.ListModelsRequest.page_token\] to obtain that page.
    // An empty token indicates the final page, per AIP-158 pagination — confirm.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[ModelService.UpdateModel][google.cloud.aiplatform.v1.ModelService.UpdateModel\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateModelRequest {
    /// Required. The Model which replaces the resource on the server.
    #[prost(message, optional, tag = "1")]
    pub model: ::core::option::Option<Model>,
    /// Required. The update mask applies to the resource.
    /// For the `FieldMask` definition, see \[google.protobuf.FieldMask][google.protobuf.FieldMask\].
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Request message for \[ModelService.DeleteModel][google.cloud.aiplatform.v1.ModelService.DeleteModel\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteModelRequest {
    /// Required. The name of the Model resource to be deleted.
    /// Format: `projects/{project}/locations/{location}/models/{model}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportModelRequest {
    /// Required. The resource name of the Model to export.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. The desired output location and configuration.
    #[prost(message, optional, tag = "2")]
    pub output_config: ::core::option::Option<export_model_request::OutputConfig>,
}
/// Nested message and enum types in `ExportModelRequest`.
// NOTE(review): prost-generated module; regenerate from the .proto to change.
pub mod export_model_request {
    /// Output configuration for the Model export.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct OutputConfig {
        /// The ID of the format in which the Model must be exported. Each Model
        /// lists the [export formats it supports]\[google.cloud.aiplatform.v1.Model.supported_export_formats\].
        /// If no value is provided here, then the first from the list of the Model's
        /// supported formats is used by default.
        #[prost(string, tag = "1")]
        pub export_format_id: ::prost::alloc::string::String,
        /// The Cloud Storage location where the Model artifact is to be
        /// written to. Under the directory given as the destination a new one with
        /// name "`model-export-<model-display-name>-<timestamp-of-export-call>`",
        /// where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format,
        /// will be created. Inside, the Model and any of its supporting files
        /// will be written.
        /// This field should only be set when the `exportableContent` field of the
        /// \[Model.supported_export_formats\] object contains `ARTIFACT`.
        #[prost(message, optional, tag = "3")]
        pub artifact_destination: ::core::option::Option<super::GcsDestination>,
        /// The Google Container Registry or Artifact Registry uri where the
        /// Model container image will be copied to.
        /// This field should only be set when the `exportableContent` field of the
        /// \[Model.supported_export_formats\] object contains `IMAGE`.
        #[prost(message, optional, tag = "4")]
        pub image_destination: ::core::option::Option<super::ContainerRegistryDestination>,
    }
}
/// Details of \[ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel\] operation.
// NOTE(review): prost-generated; metadata payload of the ExportModel LRO.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportModelOperationMetadata {
    /// The common part of the operation metadata.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
    /// Output only. Information further describing the output of this Model export.
    #[prost(message, optional, tag = "2")]
    pub output_info: ::core::option::Option<export_model_operation_metadata::OutputInfo>,
}
/// Nested message and enum types in `ExportModelOperationMetadata`.
// NOTE(review): prost-generated module; regenerate from the .proto to change.
pub mod export_model_operation_metadata {
    /// Further describes the output of the ExportModel. Supplements
    /// \[ExportModelRequest.OutputConfig][google.cloud.aiplatform.v1.ExportModelRequest.OutputConfig\].
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct OutputInfo {
        /// Output only. If the Model artifact is being exported to Google Cloud Storage this is
        /// the full path of the directory created, into which the Model files are
        /// being written to.
        #[prost(string, tag = "2")]
        pub artifact_output_uri: ::prost::alloc::string::String,
        /// Output only. If the Model image is being exported to Google Container Registry or
        /// Artifact Registry this is the full path of the image created.
        #[prost(string, tag = "3")]
        pub image_output_uri: ::prost::alloc::string::String,
    }
}
/// Response message of \[ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel\] operation.
// Intentionally empty: the export outcome is reported via the LRO metadata.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportModelResponse {}
/// Request message for \[ModelService.GetModelEvaluation][google.cloud.aiplatform.v1.ModelService.GetModelEvaluation\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetModelEvaluationRequest {
    /// Required. The name of the ModelEvaluation resource.
    /// Format:
    /// `projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[ModelService.ListModelEvaluations][google.cloud.aiplatform.v1.ModelService.ListModelEvaluations\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListModelEvaluationsRequest {
    /// Required. The resource name of the Model to list the ModelEvaluations from.
    /// Format: `projects/{project}/locations/{location}/models/{model}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via
    /// \[ListModelEvaluationsResponse.next_page_token][google.cloud.aiplatform.v1.ListModelEvaluationsResponse.next_page_token\] of the previous
    /// \[ModelService.ListModelEvaluations][google.cloud.aiplatform.v1.ModelService.ListModelEvaluations\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[ModelService.ListModelEvaluations][google.cloud.aiplatform.v1.ModelService.ListModelEvaluations\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListModelEvaluationsResponse {
    /// List of ModelEvaluations in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub model_evaluations: ::prost::alloc::vec::Vec<ModelEvaluation>,
    /// A token to retrieve next page of results.
    /// Pass to \[ListModelEvaluationsRequest.page_token][google.cloud.aiplatform.v1.ListModelEvaluationsRequest.page_token\] to obtain that page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[ModelService.GetModelEvaluationSlice][google.cloud.aiplatform.v1.ModelService.GetModelEvaluationSlice\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetModelEvaluationSliceRequest {
    /// Required. The name of the ModelEvaluationSlice resource.
    /// Format:
    /// `projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}/slices/{slice}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[ModelService.ListModelEvaluationSlices][google.cloud.aiplatform.v1.ModelService.ListModelEvaluationSlices\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListModelEvaluationSlicesRequest {
    /// Required. The resource name of the ModelEvaluation to list the ModelEvaluationSlices
    /// from. Format:
    /// `projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    ///
    /// * `slice.dimension` - for =.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via
    /// \[ListModelEvaluationSlicesResponse.next_page_token][google.cloud.aiplatform.v1.ListModelEvaluationSlicesResponse.next_page_token\] of the previous
    /// \[ModelService.ListModelEvaluationSlices][google.cloud.aiplatform.v1.ModelService.ListModelEvaluationSlices\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[ModelService.ListModelEvaluationSlices][google.cloud.aiplatform.v1.ModelService.ListModelEvaluationSlices\].
// NOTE(review): prost-generated message type; do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListModelEvaluationSlicesResponse {
    /// List of ModelEvaluations in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub model_evaluation_slices: ::prost::alloc::vec::Vec<ModelEvaluationSlice>,
    /// A token to retrieve next page of results.
    /// Pass to \[ListModelEvaluationSlicesRequest.page_token][google.cloud.aiplatform.v1.ListModelEvaluationSlicesRequest.page_token\] to obtain that
    /// page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
#[doc = r" Generated client implementations."]
// NOTE(review): tonic-build generated client — regenerate from the .proto
// rather than hand-editing. Every RPC method follows the same template:
// wait for transport readiness, build a prost codec, then issue a unary call
// on the service's static gRPC path.
pub mod model_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    #[doc = " A service for managing Vertex AI's machine learning Models."]
    #[derive(Debug, Clone)]
    pub struct ModelServiceClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl<T> ModelServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps an already-built transport/service in the gRPC client.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Builds a client whose requests pass through `interceptor` first
        // (e.g. for auth headers).
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> ModelServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            ModelServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Uploads a Model artifact into Vertex AI."]
        // Returns a google.longrunning.Operation; poll it for completion.
        pub async fn upload_model(
            &mut self,
            request: impl tonic::IntoRequest<super::UploadModelRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            // Wait until the underlying service is ready; readiness failures
            // surface to the caller as `Code::Unknown`.
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/UploadModel",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a Model."]
        pub async fn get_model(
            &mut self,
            request: impl tonic::IntoRequest<super::GetModelRequest>,
        ) -> Result<tonic::Response<super::Model>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/GetModel",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists Models in a Location."]
        pub async fn list_models(
            &mut self,
            request: impl tonic::IntoRequest<super::ListModelsRequest>,
        ) -> Result<tonic::Response<super::ListModelsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/ListModels",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates a Model."]
        pub async fn update_model(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateModelRequest>,
        ) -> Result<tonic::Response<super::Model>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/UpdateModel",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a Model."]
        #[doc = ""]
        #[doc = " Model can only be deleted if there are no [DeployedModels][] created"]
        #[doc = " from it."]
        // Returns a google.longrunning.Operation; poll it for completion.
        pub async fn delete_model(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteModelRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/DeleteModel",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Exports a trained, exportable, Model to a location specified by the"]
        #[doc = " user. A Model is considered to be exportable if it has at least one"]
        #[doc = " [supported export format][google.cloud.aiplatform.v1.Model.supported_export_formats]."]
        // Returns a google.longrunning.Operation; poll it for completion.
        pub async fn export_model(
            &mut self,
            request: impl tonic::IntoRequest<super::ExportModelRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/ExportModel",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a ModelEvaluation."]
        pub async fn get_model_evaluation(
            &mut self,
            request: impl tonic::IntoRequest<super::GetModelEvaluationRequest>,
        ) -> Result<tonic::Response<super::ModelEvaluation>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/GetModelEvaluation",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists ModelEvaluations in a Model."]
        pub async fn list_model_evaluations(
            &mut self,
            request: impl tonic::IntoRequest<super::ListModelEvaluationsRequest>,
        ) -> Result<tonic::Response<super::ListModelEvaluationsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/ListModelEvaluations",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a ModelEvaluationSlice."]
        pub async fn get_model_evaluation_slice(
            &mut self,
            request: impl tonic::IntoRequest<super::GetModelEvaluationSliceRequest>,
        ) -> Result<tonic::Response<super::ModelEvaluationSlice>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/GetModelEvaluationSlice",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists ModelEvaluationSlices in a ModelEvaluation."]
        pub async fn list_model_evaluation_slices(
            &mut self,
            request: impl tonic::IntoRequest<super::ListModelEvaluationSlicesRequest>,
        ) -> Result<tonic::Response<super::ListModelEvaluationSlicesResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.ModelService/ListModelEvaluationSlices",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// An instance of a machine learning PipelineJob.
///
/// Prost-generated message type: each `#[prost(..., tag = "N")]` attribute maps
/// the field to protobuf field number `N`; do not hand-edit the tags.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PipelineJob {
    /// Output only. The resource name of the PipelineJob.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// The display name of the Pipeline.
    /// The name can be up to 128 characters long and can be consist of any UTF-8
    /// characters.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Output only. Pipeline creation time.
    #[prost(message, optional, tag = "3")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Pipeline start time.
    #[prost(message, optional, tag = "4")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Pipeline end time.
    #[prost(message, optional, tag = "5")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this PipelineJob was most recently updated.
    #[prost(message, optional, tag = "6")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Required. The spec of the pipeline.
    #[prost(message, optional, tag = "7")]
    pub pipeline_spec: ::core::option::Option<::prost_types::Struct>,
    /// Output only. The detailed state of the job.
    // Stored as the enum's raw i32 discriminant, as prost does for all enums.
    #[prost(enumeration = "PipelineState", tag = "8")]
    pub state: i32,
    /// Output only. The details of pipeline run. Not available in the list view.
    #[prost(message, optional, tag = "9")]
    pub job_detail: ::core::option::Option<PipelineJobDetail>,
    /// Output only. The error that occurred during pipeline execution.
    /// Only populated when the pipeline's state is FAILED or CANCELLED.
    #[prost(message, optional, tag = "10")]
    pub error: ::core::option::Option<super::super::super::rpc::Status>,
    /// The labels with user-defined metadata to organize PipelineJob.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    #[prost(map = "string, string", tag = "11")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Runtime config of the pipeline.
    #[prost(message, optional, tag = "12")]
    pub runtime_config: ::core::option::Option<pipeline_job::RuntimeConfig>,
    /// Customer-managed encryption key spec for a pipelineJob. If set, this
    /// PipelineJob and all of its sub-resources will be secured by this key.
    // NOTE(review): tags 13-15 are absent here — presumably reserved or unused
    // in the source .proto; confirm against the proto definition.
    #[prost(message, optional, tag = "16")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
    /// The service account that the pipeline workload runs as.
    /// If not specified, the Compute Engine default service account in the project
    /// will be used.
    /// See
    /// <https://cloud.google.com/compute/docs/access/service-accounts#default_service_account>
    ///
    /// Users starting the pipeline must have the `iam.serviceAccounts.actAs`
    /// permission on this service account.
    #[prost(string, tag = "17")]
    pub service_account: ::prost::alloc::string::String,
    /// The full name of the Compute Engine
    /// \[network\](/compute/docs/networks-and-firewalls#networks) to which the
    /// Pipeline Job's workload should be peered. For example,
    /// `projects/12345/global/networks/myVPC`.
    /// \[Format\](/compute/docs/reference/rest/v1/networks/insert)
    /// is of the form `projects/{project}/global/networks/{network}`.
    /// Where {project} is a project number, as in `12345`, and {network} is a
    /// network name.
    ///
    /// Private services access must already be configured for the network.
    /// Pipeline job will apply the network configuration to the GCP resources
    /// being launched, if applied, such as Vertex AI
    /// Training or Dataflow job. If left unspecified, the workload is not peered
    /// with any network.
    #[prost(string, tag = "18")]
    pub network: ::prost::alloc::string::String,
}
/// Nested message and enum types in `PipelineJob`.
// Generated companion module: prost nests message types declared inside
// `PipelineJob` in the .proto under this snake_case module.
pub mod pipeline_job {
    /// The runtime config of a PipelineJob.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct RuntimeConfig {
        /// Deprecated. Use \[RuntimeConfig.parameter_values\] instead. The runtime
        /// parameters of the PipelineJob. The parameters will be passed into
        /// \[PipelineJob.pipeline_spec][google.cloud.aiplatform.v1.PipelineJob.pipeline_spec\] to replace the placeholders at runtime.
        // `super::Value` here is the aiplatform Value message, distinct from the
        // well-known `::prost_types::Value` used by `parameter_values` below.
        #[prost(map = "string, message", tag = "1")]
        pub parameters: ::std::collections::HashMap<::prost::alloc::string::String, super::Value>,
        /// Required. A path in a Cloud Storage bucket, which will be treated as the root
        /// output directory of the pipeline. It is used by the system to
        /// generate the paths of output artifacts. The artifact paths are generated
        /// with a sub-path pattern `{job_id}/{task_id}/{output_key}` under the
        /// specified output directory. The service account specified in this
        /// pipeline must have the `storage.objects.get` and `storage.objects.create`
        /// permissions for this bucket.
        #[prost(string, tag = "2")]
        pub gcs_output_directory: ::prost::alloc::string::String,
        /// The runtime parameters of the PipelineJob. The parameters will be
        /// passed into \[PipelineJob.pipeline_spec][google.cloud.aiplatform.v1.PipelineJob.pipeline_spec\] to replace the placeholders
        /// at runtime.
        #[prost(map = "string, message", tag = "3")]
        pub parameter_values:
            ::std::collections::HashMap<::prost::alloc::string::String, ::prost_types::Value>,
    }
}
/// The runtime detail of PipelineJob.
///
/// Prost-generated message; field tags map to proto field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PipelineJobDetail {
    /// Output only. The context of the pipeline.
    #[prost(message, optional, tag = "1")]
    pub pipeline_context: ::core::option::Option<Context>,
    /// Output only. The context of the current pipeline run.
    #[prost(message, optional, tag = "2")]
    pub pipeline_run_context: ::core::option::Option<Context>,
    /// Output only. The runtime details of the tasks under the pipeline.
    #[prost(message, repeated, tag = "3")]
    pub task_details: ::prost::alloc::vec::Vec<PipelineTaskDetail>,
}
/// The runtime detail of a task execution.
///
/// Prost-generated message; field tags map to proto field numbers (note the
/// tags are not in declaration order — e.g. `parent_task_id` is tag 12).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PipelineTaskDetail {
    /// Output only. The system generated ID of the task.
    #[prost(int64, tag = "1")]
    pub task_id: i64,
    /// Output only. The id of the parent task if the task is within a component scope.
    /// Empty if the task is at the root level.
    #[prost(int64, tag = "12")]
    pub parent_task_id: i64,
    /// Output only. The user specified name of the task that is defined in
    /// \[PipelineJob.spec][\].
    #[prost(string, tag = "2")]
    pub task_name: ::prost::alloc::string::String,
    /// Output only. Task create time.
    #[prost(message, optional, tag = "3")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Task start time.
    #[prost(message, optional, tag = "4")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Task end time.
    #[prost(message, optional, tag = "5")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. The detailed execution info.
    #[prost(message, optional, tag = "6")]
    pub executor_detail: ::core::option::Option<PipelineTaskExecutorDetail>,
    /// Output only. State of the task.
    // Raw i32 discriminant of `pipeline_task_detail::State`, per prost's enum
    // representation.
    #[prost(enumeration = "pipeline_task_detail::State", tag = "7")]
    pub state: i32,
    /// Output only. The execution metadata of the task.
    #[prost(message, optional, tag = "8")]
    pub execution: ::core::option::Option<Execution>,
    /// Output only. The error that occurred during task execution.
    /// Only populated when the task's state is FAILED or CANCELLED.
    #[prost(message, optional, tag = "9")]
    pub error: ::core::option::Option<super::super::super::rpc::Status>,
    /// Output only. A list of task status. This field keeps a record of task status evolving
    /// over time.
    #[prost(message, repeated, tag = "13")]
    pub pipeline_task_status: ::prost::alloc::vec::Vec<pipeline_task_detail::PipelineTaskStatus>,
    /// Output only. The runtime input artifacts of the task.
    #[prost(map = "string, message", tag = "10")]
    pub inputs: ::std::collections::HashMap<
        ::prost::alloc::string::String,
        pipeline_task_detail::ArtifactList,
    >,
    /// Output only. The runtime output artifacts of the task.
    #[prost(map = "string, message", tag = "11")]
    pub outputs: ::std::collections::HashMap<
        ::prost::alloc::string::String,
        pipeline_task_detail::ArtifactList,
    >,
}
/// Nested message and enum types in `PipelineTaskDetail`.
// Generated companion module for types nested inside `PipelineTaskDetail`
// in the .proto.
pub mod pipeline_task_detail {
    /// A single record of the task status.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct PipelineTaskStatus {
        /// Output only. Update time of this status.
        #[prost(message, optional, tag = "1")]
        pub update_time: ::core::option::Option<::prost_types::Timestamp>,
        /// Output only. The state of the task.
        #[prost(enumeration = "State", tag = "2")]
        pub state: i32,
        /// Output only. The error that occurred during the state. May be set when the state is
        /// any of the non-final state (PENDING/RUNNING/CANCELLING) or FAILED state.
        /// If the state is FAILED, the error here is final and not going to be
        /// retried.
        /// If the state is a non-final state, the error indicates a system-error
        /// being retried.
        #[prost(message, optional, tag = "3")]
        pub error: ::core::option::Option<super::super::super::super::rpc::Status>,
    }
    /// A list of artifact metadata.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct ArtifactList {
        /// Output only. A list of artifact metadata.
        #[prost(message, repeated, tag = "1")]
        pub artifacts: ::prost::alloc::vec::Vec<super::Artifact>,
    }
    /// Specifies state of TaskExecution
    // Discriminant values mirror the proto enum numbers; 0 is the proto-required
    // "unspecified" default.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum State {
        /// Unspecified.
        Unspecified = 0,
        /// Specifies pending state for the task.
        Pending = 1,
        /// Specifies task is being executed.
        Running = 2,
        /// Specifies task completed successfully.
        Succeeded = 3,
        /// Specifies Task cancel is in pending state.
        CancelPending = 4,
        /// Specifies task is being cancelled.
        Cancelling = 5,
        /// Specifies task was cancelled.
        Cancelled = 6,
        /// Specifies task failed.
        Failed = 7,
        /// Specifies task was skipped due to cache hit.
        Skipped = 8,
        /// Specifies that the task was not triggered because the task's trigger
        /// policy is not satisfied. The trigger policy is specified in the
        /// `condition` field of \[PipelineJob.pipeline_spec][google.cloud.aiplatform.v1.PipelineJob.pipeline_spec\].
        NotTriggered = 9,
    }
}
/// The runtime detail of a pipeline executor.
///
/// Wrapper around a proto `oneof`: exactly one of the variants in
/// `pipeline_task_executor_detail::Details` (tags 1 and 2) may be set.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PipelineTaskExecutorDetail {
    #[prost(oneof = "pipeline_task_executor_detail::Details", tags = "1, 2")]
    pub details: ::core::option::Option<pipeline_task_executor_detail::Details>,
}
/// Nested message and enum types in `PipelineTaskExecutorDetail`.
// Generated companion module holding the `oneof` variant payloads and the
// `Details` enum that selects between them.
pub mod pipeline_task_executor_detail {
    /// The detail of a container execution. It contains the job names of the
    /// lifecycle of a container execution.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct ContainerDetail {
        /// Output only. The name of the \[CustomJob][google.cloud.aiplatform.v1.CustomJob\] for the main container execution.
        #[prost(string, tag = "1")]
        pub main_job: ::prost::alloc::string::String,
        /// Output only. The name of the \[CustomJob][google.cloud.aiplatform.v1.CustomJob\] for the pre-caching-check container
        /// execution. This job will be available if the
        /// \[PipelineJob.pipeline_spec][google.cloud.aiplatform.v1.PipelineJob.pipeline_spec\] specifies the `pre_caching_check` hook in
        /// the lifecycle events.
        #[prost(string, tag = "2")]
        pub pre_caching_check_job: ::prost::alloc::string::String,
    }
    /// The detailed info for a custom job executor.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct CustomJobDetail {
        /// Output only. The name of the \[CustomJob][google.cloud.aiplatform.v1.CustomJob\].
        #[prost(string, tag = "1")]
        pub job: ::prost::alloc::string::String,
    }
    /// `oneof` selector: exactly one variant is present on the wire.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Details {
        /// Output only. The detailed info for a container executor.
        #[prost(message, tag = "1")]
        ContainerDetail(ContainerDetail),
        /// Output only. The detailed info for a custom job executor.
        #[prost(message, tag = "2")]
        CustomJobDetail(CustomJobDetail),
    }
}
/// Request message for \[PipelineService.CreateTrainingPipeline][google.cloud.aiplatform.v1.PipelineService.CreateTrainingPipeline\].
///
/// Prost-generated message; field tags map to proto field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateTrainingPipelineRequest {
    /// Required. The resource name of the Location to create the TrainingPipeline in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The TrainingPipeline to create.
    #[prost(message, optional, tag = "2")]
    pub training_pipeline: ::core::option::Option<TrainingPipeline>,
}
/// Request message for \[PipelineService.GetTrainingPipeline][google.cloud.aiplatform.v1.PipelineService.GetTrainingPipeline\].
///
/// Prost-generated message; a single resource-name lookup key.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTrainingPipelineRequest {
    /// Required. The name of the TrainingPipeline resource.
    /// Format:
    /// `projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[PipelineService.ListTrainingPipelines][google.cloud.aiplatform.v1.PipelineService.ListTrainingPipelines\].
///
/// Prost-generated message following the standard AIP list-request shape
/// (parent + filter + page_size + page_token + read_mask).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTrainingPipelinesRequest {
    /// Required. The resource name of the Location to list the TrainingPipelines from.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list filter.
    /// Supported fields:
    ///
    /// * `display_name` supports = and !=.
    ///
    /// * `state` supports = and !=.
    ///
    /// Some examples of using the filter are:
    ///
    /// * `state="PIPELINE_STATE_SUCCEEDED" AND display_name="my_pipeline"`
    ///
    /// * `state="PIPELINE_STATE_RUNNING" OR display_name="my_pipeline"`
    ///
    /// * `NOT display_name="my_pipeline"`
    ///
    /// * `state="PIPELINE_STATE_FAILED"`
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via
    /// \[ListTrainingPipelinesResponse.next_page_token][google.cloud.aiplatform.v1.ListTrainingPipelinesResponse.next_page_token\] of the previous
    /// \[PipelineService.ListTrainingPipelines][google.cloud.aiplatform.v1.PipelineService.ListTrainingPipelines\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "5")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[PipelineService.ListTrainingPipelines][google.cloud.aiplatform.v1.PipelineService.ListTrainingPipelines\]
///
/// Prost-generated message; standard paginated list response (items +
/// next_page_token, empty token meaning no further pages per AIP-158).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTrainingPipelinesResponse {
    /// List of TrainingPipelines in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub training_pipelines: ::prost::alloc::vec::Vec<TrainingPipeline>,
    /// A token to retrieve the next page of results.
    /// Pass to \[ListTrainingPipelinesRequest.page_token][google.cloud.aiplatform.v1.ListTrainingPipelinesRequest.page_token\] to obtain that page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[PipelineService.DeleteTrainingPipeline][google.cloud.aiplatform.v1.PipelineService.DeleteTrainingPipeline\].
///
/// Prost-generated message; a single resource-name key for deletion.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteTrainingPipelineRequest {
    /// Required. The name of the TrainingPipeline resource to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[PipelineService.CancelTrainingPipeline][google.cloud.aiplatform.v1.PipelineService.CancelTrainingPipeline\].
///
/// Prost-generated message; a single resource-name key for cancellation.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CancelTrainingPipelineRequest {
    /// Required. The name of the TrainingPipeline to cancel.
    /// Format:
    /// `projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[PipelineService.CreatePipelineJob][google.cloud.aiplatform.v1.PipelineService.CreatePipelineJob\].
///
/// Prost-generated message; field tags map to proto field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreatePipelineJobRequest {
    /// Required. The resource name of the Location to create the PipelineJob in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The PipelineJob to create.
    #[prost(message, optional, tag = "2")]
    pub pipeline_job: ::core::option::Option<PipelineJob>,
    /// The ID to use for the PipelineJob, which will become the final component of
    /// the PipelineJob name. If not provided, an ID will be automatically
    /// generated.
    ///
    /// This value should be less than 128 characters, and valid characters
    /// are /\[a-z][0-9\]-/.
    #[prost(string, tag = "3")]
    pub pipeline_job_id: ::prost::alloc::string::String,
}
/// Request message for \[PipelineService.GetPipelineJob][google.cloud.aiplatform.v1.PipelineService.GetPipelineJob\].
///
/// Prost-generated message; a single resource-name lookup key.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetPipelineJobRequest {
    /// Required. The name of the PipelineJob resource.
    /// Format:
    /// `projects/{project}/locations/{location}/pipelineJobs/{pipeline_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[PipelineService.ListPipelineJobs][google.cloud.aiplatform.v1.PipelineService.ListPipelineJobs\].
///
/// Prost-generated message following the standard AIP list-request shape.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListPipelineJobsRequest {
    /// Required. The resource name of the Location to list the PipelineJobs from.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Lists the PipelineJobs that match the filter expression. The following
    /// fields are supported:
    ///
    /// * `pipeline_name`: Supports `=` and `!=` comparisons.
    /// * `display_name`: Supports `=`, `!=` comparisons, and `:` wildcard.
    /// * `pipeline_job_user_id`: Supports `=`, `!=` comparisons, and `:` wildcard.
    /// for example, can check if pipeline's display_name contains *step* by doing
    /// display_name:\"*step*\"
    /// * `create_time`: Supports `=`, `!=`, `<`, `>`, `<=`, and `>=` comparisons.
    /// Values must be in RFC 3339 format.
    /// * `update_time`: Supports `=`, `!=`, `<`, `>`, `<=`, and `>=` comparisons.
    /// Values must be in RFC 3339 format.
    /// * `end_time`: Supports `=`, `!=`, `<`, `>`, `<=`, and `>=` comparisons.
    /// Values must be in RFC 3339 format.
    /// * `labels`: Supports key-value equality and key presence.
    ///
    /// Filter expressions can be combined together using logical operators
    /// (`AND` & `OR`).
    /// For example: `pipeline_name="test" AND create_time>"2020-05-18T13:30:00Z"`.
    ///
    /// The syntax to define filter expression is based on
    /// <https://google.aip.dev/160.>
    ///
    /// Examples:
    ///
    /// * `create_time>"2021-05-18T00:00:00Z" OR
    /// update_time>"2020-05-18T00:00:00Z"` PipelineJobs created or updated
    /// after 2020-05-18 00:00:00 UTC.
    /// * `labels.env = "prod"`
    /// PipelineJobs with label "env" set to "prod".
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained via
    /// \[ListPipelineJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListPipelineJobsResponse.next_page_token\] of the previous
    /// \[PipelineService.ListPipelineJobs][google.cloud.aiplatform.v1.PipelineService.ListPipelineJobs\] call.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// A comma-separated list of fields to order by. The default sort order is in
    /// ascending order. Use "desc" after a field name for descending. You can have
    /// multiple order_by fields provided e.g. "create_time desc, end_time",
    /// "end_time, start_time, update_time" For example, using "create_time desc,
    /// end_time" will order results by create time in descending order, and if
    /// there are multiple jobs having the same create time, order them by the end
    /// time in ascending order. if order_by is not specified, it will order by
    /// default order is create time in descending order. Supported fields:
    /// * `create_time`
    /// * `update_time`
    /// * `end_time`
    /// * `start_time`
    // NOTE(review): tag 5 is skipped (order_by is tag 6) — presumably reserved
    // in the .proto; confirm against the proto definition.
    #[prost(string, tag = "6")]
    pub order_by: ::prost::alloc::string::String,
}
/// Response message for \[PipelineService.ListPipelineJobs][google.cloud.aiplatform.v1.PipelineService.ListPipelineJobs\]
///
/// Prost-generated message; standard paginated list response (items +
/// next_page_token).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListPipelineJobsResponse {
    /// List of PipelineJobs in the requested page.
    #[prost(message, repeated, tag = "1")]
    pub pipeline_jobs: ::prost::alloc::vec::Vec<PipelineJob>,
    /// A token to retrieve the next page of results.
    /// Pass to \[ListPipelineJobsRequest.page_token][google.cloud.aiplatform.v1.ListPipelineJobsRequest.page_token\] to obtain that page.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[PipelineService.DeletePipelineJob][google.cloud.aiplatform.v1.PipelineService.DeletePipelineJob\].
///
/// Prost-generated message; a single resource-name key for deletion.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeletePipelineJobRequest {
    /// Required. The name of the PipelineJob resource to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/pipelineJobs/{pipeline_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[PipelineService.CancelPipelineJob][google.cloud.aiplatform.v1.PipelineService.CancelPipelineJob\].
///
/// Prost-generated message; a single resource-name key for cancellation.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CancelPipelineJobRequest {
    /// Required. The name of the PipelineJob to cancel.
    /// Format:
    /// `projects/{project}/locations/{location}/pipelineJobs/{pipeline_job}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
#[doc = r" Generated client implementations."]
// NOTE(review): tonic-build generated client module. Every RPC method below
// follows the same shape: await channel readiness (mapping failures to
// `Status` with `Code::Unknown`), build a ProstCodec, and issue a unary call
// on the hard-coded gRPC method path. Prefer regenerating from the .proto
// over hand edits.
pub mod pipeline_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    #[doc = " A service for creating and managing Vertex AI's pipelines. This includes both"]
    #[doc = " `TrainingPipeline` resources (used for AutoML and custom training) and"]
    #[doc = " `PipelineJob` resources (used for Vertex Pipelines)."]
    #[derive(Debug, Clone)]
    pub struct PipelineServiceClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl<T> PipelineServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps a raw gRPC service (e.g. a tonic Channel) in the typed client.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Builds a client whose outgoing requests pass through `interceptor`
        // before hitting the underlying service.
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> PipelineServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            PipelineServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Creates a TrainingPipeline. A created TrainingPipeline right away will be"]
        #[doc = " attempted to be run."]
        pub async fn create_training_pipeline(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateTrainingPipelineRequest>,
        ) -> Result<tonic::Response<super::TrainingPipeline>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/CreateTrainingPipeline",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a TrainingPipeline."]
        pub async fn get_training_pipeline(
            &mut self,
            request: impl tonic::IntoRequest<super::GetTrainingPipelineRequest>,
        ) -> Result<tonic::Response<super::TrainingPipeline>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/GetTrainingPipeline",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists TrainingPipelines in a Location."]
        pub async fn list_training_pipelines(
            &mut self,
            request: impl tonic::IntoRequest<super::ListTrainingPipelinesRequest>,
        ) -> Result<tonic::Response<super::ListTrainingPipelinesResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/ListTrainingPipelines",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a TrainingPipeline."]
        // Returns a long-running `Operation` rather than an immediate result.
        pub async fn delete_training_pipeline(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteTrainingPipelineRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/DeleteTrainingPipeline",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Cancels a TrainingPipeline."]
        #[doc = " Starts asynchronous cancellation on the TrainingPipeline. The server"]
        #[doc = " makes a best effort to cancel the pipeline, but success is not"]
        #[doc = " guaranteed. Clients can use [PipelineService.GetTrainingPipeline][google.cloud.aiplatform.v1.PipelineService.GetTrainingPipeline] or"]
        #[doc = " other methods to check whether the cancellation succeeded or whether the"]
        #[doc = " pipeline completed despite cancellation. On successful cancellation,"]
        #[doc = " the TrainingPipeline is not deleted; instead it becomes a pipeline with"]
        #[doc = " a [TrainingPipeline.error][google.cloud.aiplatform.v1.TrainingPipeline.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,"]
        #[doc = " corresponding to `Code.CANCELLED`, and [TrainingPipeline.state][google.cloud.aiplatform.v1.TrainingPipeline.state] is set to"]
        #[doc = " `CANCELLED`."]
        pub async fn cancel_training_pipeline(
            &mut self,
            request: impl tonic::IntoRequest<super::CancelTrainingPipelineRequest>,
        ) -> Result<tonic::Response<()>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/CancelTrainingPipeline",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a PipelineJob. A PipelineJob will run immediately when created."]
        pub async fn create_pipeline_job(
            &mut self,
            request: impl tonic::IntoRequest<super::CreatePipelineJobRequest>,
        ) -> Result<tonic::Response<super::PipelineJob>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/CreatePipelineJob",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a PipelineJob."]
        pub async fn get_pipeline_job(
            &mut self,
            request: impl tonic::IntoRequest<super::GetPipelineJobRequest>,
        ) -> Result<tonic::Response<super::PipelineJob>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/GetPipelineJob",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists PipelineJobs in a Location."]
        pub async fn list_pipeline_jobs(
            &mut self,
            request: impl tonic::IntoRequest<super::ListPipelineJobsRequest>,
        ) -> Result<tonic::Response<super::ListPipelineJobsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/ListPipelineJobs",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a PipelineJob."]
        // Returns a long-running `Operation` rather than an immediate result.
        pub async fn delete_pipeline_job(
            &mut self,
            request: impl tonic::IntoRequest<super::DeletePipelineJobRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/DeletePipelineJob",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Cancels a PipelineJob."]
        #[doc = " Starts asynchronous cancellation on the PipelineJob. The server"]
        #[doc = " makes a best effort to cancel the pipeline, but success is not"]
        #[doc = " guaranteed. Clients can use [PipelineService.GetPipelineJob][google.cloud.aiplatform.v1.PipelineService.GetPipelineJob] or"]
        #[doc = " other methods to check whether the cancellation succeeded or whether the"]
        #[doc = " pipeline completed despite cancellation. On successful cancellation,"]
        #[doc = " the PipelineJob is not deleted; instead it becomes a pipeline with"]
        #[doc = " a [PipelineJob.error][google.cloud.aiplatform.v1.PipelineJob.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,"]
        #[doc = " corresponding to `Code.CANCELLED`, and [PipelineJob.state][google.cloud.aiplatform.v1.PipelineJob.state] is set to"]
        #[doc = " `CANCELLED`."]
        pub async fn cancel_pipeline_job(
            &mut self,
            request: impl tonic::IntoRequest<super::CancelPipelineJobRequest>,
        ) -> Result<tonic::Response<()>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PipelineService/CancelPipelineJob",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// Request message for \[PredictionService.Predict][google.cloud.aiplatform.v1.PredictionService.Predict\].
///
/// Prost-generated message; field tags map to proto field numbers.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PredictRequest {
    /// Required. The name of the Endpoint requested to serve the prediction.
    /// Format:
    /// `projects/{project}/locations/{location}/endpoints/{endpoint}`
    #[prost(string, tag = "1")]
    pub endpoint: ::prost::alloc::string::String,
    /// Required. The instances that are the input to the prediction call.
    /// A DeployedModel may have an upper limit on the number of instances it
    /// supports per request, and when it is exceeded the prediction call errors
    /// in case of AutoML Models, or, in case of customer created Models, the
    /// behaviour is as documented by that Model.
    /// The schema of any single instance may be specified via Endpoint's
    /// DeployedModels' \[Model's][google.cloud.aiplatform.v1.DeployedModel.model\]
    /// \[PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata\]
    /// \[instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\].
    // Schemaless payloads: instances/parameters are well-known `Value` structs,
    // validated against the model's schema server-side, not here.
    #[prost(message, repeated, tag = "2")]
    pub instances: ::prost::alloc::vec::Vec<::prost_types::Value>,
    /// The parameters that govern the prediction. The schema of the parameters may
    /// be specified via Endpoint's DeployedModels' [Model's ]\[google.cloud.aiplatform.v1.DeployedModel.model\]
    /// \[PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata\]
    /// \[parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri\].
    #[prost(message, optional, tag = "3")]
    pub parameters: ::core::option::Option<::prost_types::Value>,
}
/// Response message for \[PredictionService.Predict][google.cloud.aiplatform.v1.PredictionService.Predict\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PredictResponse {
    /// The predictions that are the output of the predictions call.
    /// The schema of any single prediction may be specified via Endpoint's
    /// DeployedModels' \[Model's][google.cloud.aiplatform.v1.DeployedModel.model\]
    /// \[PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata\]
    /// \[prediction_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.prediction_schema_uri\].
    #[prost(message, repeated, tag = "1")]
    pub predictions: ::prost::alloc::vec::Vec<::prost_types::Value>,
    /// ID of the Endpoint's DeployedModel that served this prediction.
    #[prost(string, tag = "2")]
    pub deployed_model_id: ::prost::alloc::string::String,
    /// Output only. The resource name of the Model which is deployed as the DeployedModel that
    /// this prediction hits.
    #[prost(string, tag = "3")]
    pub model: ::prost::alloc::string::String,
    /// Output only. The \[display name][google.cloud.aiplatform.v1.Model.display_name\] of the Model which is deployed as
    /// the DeployedModel that this prediction hits.
    #[prost(string, tag = "4")]
    pub model_display_name: ::prost::alloc::string::String,
}
/// Request message for \[PredictionService.RawPredict][google.cloud.aiplatform.v1.PredictionService.RawPredict\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct RawPredictRequest {
    /// Required. The name of the Endpoint requested to serve the prediction.
    /// Format:
    /// `projects/{project}/locations/{location}/endpoints/{endpoint}`
    #[prost(string, tag = "1")]
    pub endpoint: ::prost::alloc::string::String,
    /// The prediction input. Supports HTTP headers and arbitrary data payload.
    ///
    /// A \[DeployedModel][google.cloud.aiplatform.v1.DeployedModel\] may have an upper limit on the number of instances it
    /// supports per request. When this limit is exceeded for an AutoML model,
    /// the \[RawPredict][google.cloud.aiplatform.v1.PredictionService.RawPredict\] method returns an error.
    /// When this limit is exceeded for a custom-trained model, the behavior varies
    /// depending on the model.
    ///
    /// You can specify the schema for each instance in the
    /// \[predict_schemata.instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\]
    /// field when you create a \[Model][google.cloud.aiplatform.v1.Model\]. This schema applies when you deploy the
    /// `Model` as a `DeployedModel` to an \[Endpoint][google.cloud.aiplatform.v1.Endpoint\] and use the `RawPredict`
    /// method.
    #[prost(message, optional, tag = "2")]
    pub http_body: ::core::option::Option<super::super::super::api::HttpBody>,
}
/// Request message for \[PredictionService.Explain][google.cloud.aiplatform.v1.PredictionService.Explain\].
// NOTE: prost field tags are non-sequential (1, 2, 4, 5, 3); they mirror the
// field numbers of the original proto definition and must not be renumbered.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExplainRequest {
    /// Required. The name of the Endpoint requested to serve the explanation.
    /// Format:
    /// `projects/{project}/locations/{location}/endpoints/{endpoint}`
    #[prost(string, tag = "1")]
    pub endpoint: ::prost::alloc::string::String,
    /// Required. The instances that are the input to the explanation call.
    /// A DeployedModel may have an upper limit on the number of instances it
    /// supports per request, and when it is exceeded the explanation call errors
    /// in case of AutoML Models, or, in case of customer created Models, the
    /// behaviour is as documented by that Model.
    /// The schema of any single instance may be specified via Endpoint's
    /// DeployedModels' \[Model's][google.cloud.aiplatform.v1.DeployedModel.model\]
    /// \[PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata\]
    /// \[instance_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.instance_schema_uri\].
    #[prost(message, repeated, tag = "2")]
    pub instances: ::prost::alloc::vec::Vec<::prost_types::Value>,
    /// The parameters that govern the prediction. The schema of the parameters may
    /// be specified via Endpoint's DeployedModels'
    /// \[Model's][google.cloud.aiplatform.v1.DeployedModel.model\]
    /// \[PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata\]
    /// \[parameters_schema_uri][google.cloud.aiplatform.v1.PredictSchemata.parameters_schema_uri\].
    #[prost(message, optional, tag = "4")]
    pub parameters: ::core::option::Option<::prost_types::Value>,
    /// If specified, overrides the
    /// \[explanation_spec][google.cloud.aiplatform.v1.DeployedModel.explanation_spec\] of the DeployedModel.
    /// Can be used for explaining prediction results with different
    /// configurations, such as:
    /// - Explaining top-5 predictions results as opposed to top-1;
    /// - Increasing path count or step count of the attribution methods to reduce
    /// approximate errors;
    /// - Using different baselines for explaining the prediction results.
    #[prost(message, optional, tag = "5")]
    pub explanation_spec_override: ::core::option::Option<ExplanationSpecOverride>,
    /// If specified, this ExplainRequest will be served by the chosen
    /// DeployedModel, overriding \[Endpoint.traffic_split][google.cloud.aiplatform.v1.Endpoint.traffic_split\].
    #[prost(string, tag = "3")]
    pub deployed_model_id: ::prost::alloc::string::String,
}
/// Response message for \[PredictionService.Explain][google.cloud.aiplatform.v1.PredictionService.Explain\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExplainResponse {
    /// The explanations of the Model's \[PredictResponse.predictions][google.cloud.aiplatform.v1.PredictResponse.predictions\].
    ///
    /// It has the same number of elements as \[instances][google.cloud.aiplatform.v1.ExplainRequest.instances\]
    /// to be explained.
    // One explanation per instance in the matching ExplainRequest, in order.
    #[prost(message, repeated, tag = "1")]
    pub explanations: ::prost::alloc::vec::Vec<Explanation>,
    /// ID of the Endpoint's DeployedModel that served this explanation.
    #[prost(string, tag = "2")]
    pub deployed_model_id: ::prost::alloc::string::String,
    /// The predictions that are the output of the predictions call.
    /// Same as \[PredictResponse.predictions][google.cloud.aiplatform.v1.PredictResponse.predictions\].
    #[prost(message, repeated, tag = "3")]
    pub predictions: ::prost::alloc::vec::Vec<::prost_types::Value>,
}
#[doc = r" Generated client implementations."]
pub mod prediction_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    #[doc = " A service for online predictions and explanations."]
    #[derive(Debug, Clone)]
    pub struct PredictionServiceClient<T> {
        // The generic gRPC machinery wrapping the user-supplied transport `T`.
        inner: tonic::client::Grpc<T>,
    }
    impl<T> PredictionServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps a raw transport/channel in the tonic gRPC client machinery.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Builds a client whose every request passes through `interceptor`
        // (e.g. for injecting auth headers) before hitting the transport.
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> PredictionServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            PredictionServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Perform an online prediction."]
        pub async fn predict(
            &mut self,
            request: impl tonic::IntoRequest<super::PredictRequest>,
        ) -> Result<tonic::Response<super::PredictResponse>, tonic::Status> {
            // Standard generated unary-call sequence: await channel readiness,
            // then dispatch the request on the fully-qualified method path.
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PredictionService/Predict",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Perform an online prediction with an arbitrary HTTP payload."]
        #[doc = ""]
        #[doc = " The response includes the following HTTP headers:"]
        #[doc = ""]
        #[doc = " * `X-Vertex-AI-Endpoint-Id`: ID of the [Endpoint][google.cloud.aiplatform.v1.Endpoint] that served this"]
        #[doc = " prediction."]
        #[doc = ""]
        #[doc = " * `X-Vertex-AI-Deployed-Model-Id`: ID of the Endpoint's [DeployedModel][google.cloud.aiplatform.v1.DeployedModel]"]
        #[doc = " that served this prediction."]
        pub async fn raw_predict(
            &mut self,
            request: impl tonic::IntoRequest<super::RawPredictRequest>,
        ) -> Result<tonic::Response<super::super::super::super::api::HttpBody>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PredictionService/RawPredict",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Perform an online explanation."]
        #[doc = ""]
        #[doc = " If [deployed_model_id][google.cloud.aiplatform.v1.ExplainRequest.deployed_model_id] is specified,"]
        #[doc = " the corresponding DeployModel must have"]
        #[doc = " [explanation_spec][google.cloud.aiplatform.v1.DeployedModel.explanation_spec]"]
        #[doc = " populated. If [deployed_model_id][google.cloud.aiplatform.v1.ExplainRequest.deployed_model_id]"]
        #[doc = " is not specified, all DeployedModels must have"]
        #[doc = " [explanation_spec][google.cloud.aiplatform.v1.DeployedModel.explanation_spec]"]
        #[doc = " populated. Only deployed AutoML tabular Models have"]
        #[doc = " explanation_spec."]
        pub async fn explain(
            &mut self,
            request: impl tonic::IntoRequest<super::ExplainRequest>,
        ) -> Result<tonic::Response<super::ExplainResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.PredictionService/Explain",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// Request message for \[SpecialistPoolService.CreateSpecialistPool][google.cloud.aiplatform.v1.SpecialistPoolService.CreateSpecialistPool\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateSpecialistPoolRequest {
    /// Required. The parent Project name for the new SpecialistPool.
    /// The form is `projects/{project}/locations/{location}`.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The SpecialistPool to create.
    // `Option` only because proto3 message fields are always optional on the
    // wire; the service requires it to be set.
    #[prost(message, optional, tag = "2")]
    pub specialist_pool: ::core::option::Option<SpecialistPool>,
}
/// Runtime operation information for
/// \[SpecialistPoolService.CreateSpecialistPool][google.cloud.aiplatform.v1.SpecialistPoolService.CreateSpecialistPool\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateSpecialistPoolOperationMetadata {
    /// The operation generic information.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Request message for \[SpecialistPoolService.GetSpecialistPool][google.cloud.aiplatform.v1.SpecialistPoolService.GetSpecialistPool\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetSpecialistPoolRequest {
    /// Required. The name of the SpecialistPool resource.
    /// The form is
    /// `projects/{project}/locations/{location}/specialistPools/{specialist_pool}`.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[SpecialistPoolService.ListSpecialistPools][google.cloud.aiplatform.v1.SpecialistPoolService.ListSpecialistPools\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListSpecialistPoolsRequest {
    /// Required. The name of the SpecialistPool's parent resource.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The standard list page size.
    #[prost(int32, tag = "2")]
    pub page_size: i32,
    /// The standard list page token.
    /// Typically obtained by \[ListSpecialistPoolsResponse.next_page_token][google.cloud.aiplatform.v1.ListSpecialistPoolsResponse.next_page_token\] of
    /// the previous \[SpecialistPoolService.ListSpecialistPools][google.cloud.aiplatform.v1.SpecialistPoolService.ListSpecialistPools\] call. Return
    /// first page if empty.
    #[prost(string, tag = "3")]
    pub page_token: ::prost::alloc::string::String,
    /// Mask specifying which fields to read. A FieldMask represents a set of
    /// symbolic field paths.
    #[prost(message, optional, tag = "4")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[SpecialistPoolService.ListSpecialistPools][google.cloud.aiplatform.v1.SpecialistPoolService.ListSpecialistPools\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListSpecialistPoolsResponse {
    /// A list of SpecialistPools that matches the specified filter in the request.
    #[prost(message, repeated, tag = "1")]
    pub specialist_pools: ::prost::alloc::vec::Vec<SpecialistPool>,
    /// The standard List next-page token.
    // Empty when there are no further pages to fetch.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[SpecialistPoolService.DeleteSpecialistPool][google.cloud.aiplatform.v1.SpecialistPoolService.DeleteSpecialistPool\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteSpecialistPoolRequest {
    /// Required. The resource name of the SpecialistPool to delete. Format:
    /// `projects/{project}/locations/{location}/specialistPools/{specialist_pool}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// If set to true, any specialist managers in this SpecialistPool will also be
    /// deleted. (Otherwise, the request will only work if the SpecialistPool has
    /// no specialist managers.)
    #[prost(bool, tag = "2")]
    pub force: bool,
}
/// Request message for \[SpecialistPoolService.UpdateSpecialistPool][google.cloud.aiplatform.v1.SpecialistPoolService.UpdateSpecialistPool\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateSpecialistPoolRequest {
    /// Required. The SpecialistPool which replaces the resource on the server.
    #[prost(message, optional, tag = "1")]
    pub specialist_pool: ::core::option::Option<SpecialistPool>,
    /// Required. The update mask applies to the resource.
    // Selects which fields of `specialist_pool` are written; unlisted fields
    // are left untouched on the server.
    #[prost(message, optional, tag = "2")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Runtime operation metadata for
/// \[SpecialistPoolService.UpdateSpecialistPool][google.cloud.aiplatform.v1.SpecialistPoolService.UpdateSpecialistPool\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateSpecialistPoolOperationMetadata {
    /// Output only. The name of the SpecialistPool to which the specialists are being added.
    /// Format:
    /// `projects/{project_id}/locations/{location_id}/specialistPools/{specialist_pool}`
    #[prost(string, tag = "1")]
    pub specialist_pool: ::prost::alloc::string::String,
    /// The operation generic information.
    #[prost(message, optional, tag = "2")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
#[doc = r" Generated client implementations."]
pub mod specialist_pool_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    #[doc = " A service for creating and managing Customer SpecialistPools."]
    #[doc = " When customers start Data Labeling jobs, they can reuse/create Specialist"]
    #[doc = " Pools to bring their own Specialists to label the data."]
    #[doc = " Customers can add/remove Managers for the Specialist Pool on Cloud console,"]
    #[doc = " then Managers will get email notifications to manage Specialists and tasks on"]
    #[doc = " CrowdCompute console."]
    #[derive(Debug, Clone)]
    pub struct SpecialistPoolServiceClient<T> {
        // The generic gRPC machinery wrapping the user-supplied transport `T`.
        inner: tonic::client::Grpc<T>,
    }
    impl<T> SpecialistPoolServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps a raw transport/channel in the tonic gRPC client machinery.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Builds a client whose every request passes through `interceptor`
        // (e.g. for injecting auth headers) before hitting the transport.
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> SpecialistPoolServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            SpecialistPoolServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Creates a SpecialistPool."]
        // Long-running RPC: returns a `longrunning::Operation` to be polled.
        pub async fn create_specialist_pool(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateSpecialistPoolRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            // Standard generated unary-call sequence: await channel readiness,
            // then dispatch the request on the fully-qualified method path.
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.SpecialistPoolService/CreateSpecialistPool",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a SpecialistPool."]
        pub async fn get_specialist_pool(
            &mut self,
            request: impl tonic::IntoRequest<super::GetSpecialistPoolRequest>,
        ) -> Result<tonic::Response<super::SpecialistPool>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.SpecialistPoolService/GetSpecialistPool",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists SpecialistPools in a Location."]
        pub async fn list_specialist_pools(
            &mut self,
            request: impl tonic::IntoRequest<super::ListSpecialistPoolsRequest>,
        ) -> Result<tonic::Response<super::ListSpecialistPoolsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.SpecialistPoolService/ListSpecialistPools",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a SpecialistPool as well as all Specialists in the pool."]
        // Long-running RPC: returns a `longrunning::Operation` to be polled.
        pub async fn delete_specialist_pool(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteSpecialistPoolRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.SpecialistPoolService/DeleteSpecialistPool",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates a SpecialistPool."]
        // Long-running RPC: returns a `longrunning::Operation` to be polled.
        pub async fn update_specialist_pool(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateSpecialistPoolRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.SpecialistPoolService/UpdateSpecialistPool",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// Tensorboard is a physical database that stores users' training metrics.
/// A default Tensorboard is provided in each region of a GCP project.
/// If needed users can also create extra Tensorboards in their projects.
// NOTE: prost field tags are non-sequential (1, 2, 3, 11, 10, 5, ...); they
// mirror the field numbers of the original proto and must not be renumbered.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Tensorboard {
    /// Output only. Name of the Tensorboard.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. User provided name of this Tensorboard.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Description of this Tensorboard.
    #[prost(string, tag = "3")]
    pub description: ::prost::alloc::string::String,
    /// Customer-managed encryption key spec for a Tensorboard. If set, this
    /// Tensorboard and all sub-resources of this Tensorboard will be secured by
    /// this key.
    #[prost(message, optional, tag = "11")]
    pub encryption_spec: ::core::option::Option<EncryptionSpec>,
    /// Output only. Consumer project Cloud Storage path prefix used to store blob data, which
    /// can either be a bucket or directory. Does not end with a '/'.
    #[prost(string, tag = "10")]
    pub blob_storage_path_prefix: ::prost::alloc::string::String,
    /// Output only. The number of Runs stored in this Tensorboard.
    #[prost(int32, tag = "5")]
    pub run_count: i32,
    /// Output only. Timestamp when this Tensorboard was created.
    #[prost(message, optional, tag = "6")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this Tensorboard was last updated.
    #[prost(message, optional, tag = "7")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The labels with user-defined metadata to organize your Tensorboards.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one Tensorboard
    /// (System labels are excluded).
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable.
    #[prost(map = "string, string", tag = "8")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Used to perform a consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "9")]
    pub etag: ::prost::alloc::string::String,
}
/// TensorboardTimeSeries maps to times series produced in training runs
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorboardTimeSeries {
    /// Output only. Name of the TensorboardTimeSeries.
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. User provided name of this TensorboardTimeSeries.
    /// This value should be unique among all TensorboardTimeSeries resources
    /// belonging to the same TensorboardRun resource (parent resource).
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Description of this TensorboardTimeSeries.
    #[prost(string, tag = "3")]
    pub description: ::prost::alloc::string::String,
    /// Required. Immutable. Type of TensorboardTimeSeries value.
    // Stored as `i32` per prost convention; decode via
    // `tensorboard_time_series::ValueType::from_i32`.
    #[prost(enumeration = "tensorboard_time_series::ValueType", tag = "4")]
    pub value_type: i32,
    /// Output only. Timestamp when this TensorboardTimeSeries was created.
    #[prost(message, optional, tag = "5")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this TensorboardTimeSeries was last updated.
    #[prost(message, optional, tag = "6")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Used to perform a consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "7")]
    pub etag: ::prost::alloc::string::String,
    /// Immutable. Name of the plugin this time series pertains to. Such as Scalar, Tensor,
    /// Blob
    #[prost(string, tag = "8")]
    pub plugin_name: ::prost::alloc::string::String,
    /// Data of the current plugin, with the size limited to 65KB.
    #[prost(bytes = "vec", tag = "9")]
    pub plugin_data: ::prost::alloc::vec::Vec<u8>,
    /// Output only. Scalar, Tensor, or Blob metadata for this TensorboardTimeSeries.
    #[prost(message, optional, tag = "10")]
    pub metadata: ::core::option::Option<tensorboard_time_series::Metadata>,
}
/// Nested message and enum types in `TensorboardTimeSeries`.
pub mod tensorboard_time_series {
    /// Describes metadata for a TensorboardTimeSeries.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Metadata {
        /// Output only. Max step index of all data points within a TensorboardTimeSeries.
        #[prost(int64, tag = "1")]
        pub max_step: i64,
        /// Output only. Max wall clock timestamp of all data points within a
        /// TensorboardTimeSeries.
        #[prost(message, optional, tag = "2")]
        pub max_wall_time: ::core::option::Option<::prost_types::Timestamp>,
        /// Output only. The largest blob sequence length (number of blobs) of all data points in
        /// this time series, if its ValueType is BLOB_SEQUENCE.
        #[prost(int64, tag = "3")]
        pub max_blob_sequence_length: i64,
    }
    /// An enum representing the value type of a TensorboardTimeSeries.
    // Discriminant values match the proto enum numbers and are what appears in
    // the `value_type: i32` field of the parent message.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum ValueType {
        /// The value type is unspecified.
        Unspecified = 0,
        /// Used for TensorboardTimeSeries that is a list of scalars.
        /// E.g. accuracy of a model over epochs/time.
        Scalar = 1,
        /// Used for TensorboardTimeSeries that is a list of tensors.
        /// E.g. histograms of weights of layer in a model over epoch/time.
        Tensor = 2,
        /// Used for TensorboardTimeSeries that is a list of blob sequences.
        /// E.g. set of sample images with labels over epochs/time.
        BlobSequence = 3,
    }
}
/// All the data stored in a TensorboardTimeSeries.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TimeSeriesData {
    /// Required. The ID of the TensorboardTimeSeries, which will become the final component
    /// of the TensorboardTimeSeries' resource name
    #[prost(string, tag = "1")]
    pub tensorboard_time_series_id: ::prost::alloc::string::String,
    /// Required. Immutable. The value type of this time series. All the values in this time series data
    /// must match this value type.
    #[prost(enumeration = "tensorboard_time_series::ValueType", tag = "2")]
    pub value_type: i32,
    /// Required. Data points in this time series.
    #[prost(message, repeated, tag = "3")]
    pub values: ::prost::alloc::vec::Vec<TimeSeriesDataPoint>,
}
/// A TensorboardTimeSeries data point.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TimeSeriesDataPoint {
    /// Wall clock timestamp when this data point is generated by the end user.
    #[prost(message, optional, tag = "1")]
    pub wall_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Step index of this data point within the run.
    #[prost(int64, tag = "2")]
    pub step: i64,
    /// Value of this time series data point.
    // Proto `oneof`: exactly one of the tag-3/4/5 variants is set at a time.
    #[prost(oneof = "time_series_data_point::Value", tags = "3, 4, 5")]
    pub value: ::core::option::Option<time_series_data_point::Value>,
}
/// Nested message and enum types in `TimeSeriesDataPoint`.
pub mod time_series_data_point {
    /// Value of this time series data point.
    // Each variant's tag matches the corresponding proto field number in the
    // parent message's `oneof`.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Value {
        /// A scalar value.
        #[prost(message, tag = "3")]
        Scalar(super::Scalar),
        /// A tensor value.
        #[prost(message, tag = "4")]
        Tensor(super::TensorboardTensor),
        /// A blob sequence value.
        #[prost(message, tag = "5")]
        Blobs(super::TensorboardBlobSequence),
    }
}
/// One point viewable on a scalar metric plot.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Scalar {
    /// Value of the point at this step / timestamp.
    #[prost(double, tag = "1")]
    pub value: f64,
}
/// One point viewable on a tensor metric plot.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorboardTensor {
    /// Required. Serialized form of
    /// <https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/framework/tensor.proto>
    // Opaque serialized TensorProto bytes; not interpreted by this crate.
    #[prost(bytes = "vec", tag = "1")]
    pub value: ::prost::alloc::vec::Vec<u8>,
    /// Optional. Version number of TensorProto used to serialize \[value][google.cloud.aiplatform.v1.TensorboardTensor.value\].
    #[prost(int32, tag = "2")]
    pub version_number: i32,
}
/// One point viewable on a blob metric plot, but mostly just a wrapper message
/// to work around the fact that repeated fields can't be used directly within
/// `oneof` fields.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorboardBlobSequence {
    /// List of blobs contained within the sequence.
    #[prost(message, repeated, tag = "1")]
    pub values: ::prost::alloc::vec::Vec<TensorboardBlob>,
}
/// One blob (e.g, image, graph) viewable on a blob metric plot.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorboardBlob {
    /// Output only. A URI safe key uniquely identifying a blob. Can be used to locate the blob
    /// stored in the Cloud Storage bucket of the consumer project.
    #[prost(string, tag = "1")]
    pub id: ::prost::alloc::string::String,
    /// Optional. The bytes of the blob are not present unless they are returned by the
    /// ReadTensorboardBlobData endpoint.
    #[prost(bytes = "vec", tag = "2")]
    pub data: ::prost::alloc::vec::Vec<u8>,
}
/// A TensorboardExperiment is a group of TensorboardRuns, that are typically the
/// results of a training job run, in a Tensorboard.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorboardExperiment {
    /// Output only. Name of the TensorboardExperiment.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// User provided name of this TensorboardExperiment.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Description of this TensorboardExperiment.
    #[prost(string, tag = "3")]
    pub description: ::prost::alloc::string::String,
    /// Output only. Timestamp when this TensorboardExperiment was created.
    #[prost(message, optional, tag = "4")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this TensorboardExperiment was last updated.
    #[prost(message, optional, tag = "5")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The labels with user-defined metadata to organize your Datasets.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one Dataset (System
    /// labels are excluded).
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable. Following system labels exist for each Dataset:
    /// * "aiplatform.googleapis.com/dataset_metadata_schema":
    ///   - output only, its value is the
    ///     \[metadata_schema's][metadata_schema_uri\] title.
    #[prost(map = "string, string", tag = "6")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "7")]
    pub etag: ::prost::alloc::string::String,
    /// Immutable. Source of the TensorboardExperiment. Example: a custom training job.
    #[prost(string, tag = "8")]
    pub source: ::prost::alloc::string::String,
}
/// TensorboardRun maps to a specific execution of a training job with a given
/// set of hyperparameter values, model definition, dataset, etc.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorboardRun {
    /// Output only. Name of the TensorboardRun.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
    /// Required. User provided name of this TensorboardRun.
    /// This value must be unique among all TensorboardRuns
    /// belonging to the same parent TensorboardExperiment.
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
    /// Description of this TensorboardRun.
    #[prost(string, tag = "3")]
    pub description: ::prost::alloc::string::String,
    /// Output only. Timestamp when this TensorboardRun was created.
    #[prost(message, optional, tag = "6")]
    pub create_time: ::core::option::Option<::prost_types::Timestamp>,
    /// Output only. Timestamp when this TensorboardRun was last updated.
    #[prost(message, optional, tag = "7")]
    pub update_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The labels with user-defined metadata to organize your TensorboardRuns.
    ///
    /// This field will be used to filter and visualize Runs in the Tensorboard UI.
    /// For example, a Vertex AI training job can set a label
    /// aiplatform.googleapis.com/training_job_id=xxxxx to all the runs created
    /// within that job. An end user can set a label experiment_id=xxxxx for all
    /// the runs produced in a Jupyter notebook. These runs can be grouped by a
    /// label value and visualized together in the Tensorboard UI.
    ///
    /// Label keys and values can be no longer than 64 characters
    /// (Unicode codepoints), can only contain lowercase letters, numeric
    /// characters, underscores and dashes. International characters are allowed.
    /// No more than 64 user labels can be associated with one TensorboardRun
    /// (System labels are excluded).
    ///
    /// See <https://goo.gl/xmQnxf> for more information and examples of labels.
    /// System reserved label keys are prefixed with "aiplatform.googleapis.com/"
    /// and are immutable.
    #[prost(map = "string, string", tag = "8")]
    pub labels:
        ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
    /// Used to perform consistent read-modify-write updates. If not set, a blind
    /// "overwrite" update happens.
    #[prost(string, tag = "9")]
    pub etag: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.CreateTensorboard][google.cloud.aiplatform.v1.TensorboardService.CreateTensorboard\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateTensorboardRequest {
    /// Required. The resource name of the Location to create the Tensorboard in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Tensorboard to create.
    #[prost(message, optional, tag = "2")]
    pub tensorboard: ::core::option::Option<Tensorboard>,
}
/// Request message for \[TensorboardService.GetTensorboard][google.cloud.aiplatform.v1.TensorboardService.GetTensorboard\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTensorboardRequest {
    /// Required. The name of the Tensorboard resource.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.ListTensorboards][google.cloud.aiplatform.v1.TensorboardService.ListTensorboards\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTensorboardsRequest {
    /// Required. The resource name of the Location to list Tensorboards.
    /// Format:
    /// `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Lists the Tensorboards that match the filter expression.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The maximum number of Tensorboards to return. The service may return
    /// fewer than this value. If unspecified, at most 100 Tensorboards will be
    /// returned. The maximum value is 100; values above 100 will be coerced to
    /// 100.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[TensorboardService.ListTensorboards][google.cloud.aiplatform.v1.TensorboardService.ListTensorboards\] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[TensorboardService.ListTensorboards][google.cloud.aiplatform.v1.TensorboardService.ListTensorboards\] must
    /// match the call that provided the page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Field to use to sort the list.
    #[prost(string, tag = "5")]
    pub order_by: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "6")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[TensorboardService.ListTensorboards][google.cloud.aiplatform.v1.TensorboardService.ListTensorboards\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTensorboardsResponse {
    /// The Tensorboards matching the request.
    #[prost(message, repeated, tag = "1")]
    pub tensorboards: ::prost::alloc::vec::Vec<Tensorboard>,
    /// A token, which can be sent as \[ListTensorboardsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardsRequest.page_token\]
    /// to retrieve the next page. If this field is omitted, there are no
    /// subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.UpdateTensorboard][google.cloud.aiplatform.v1.TensorboardService.UpdateTensorboard\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateTensorboardRequest {
    /// Required. Field mask is used to specify the fields to be overwritten in the
    /// Tensorboard resource by the update.
    /// The fields specified in the update_mask are relative to the resource, not
    /// the full request. A field will be overwritten if it is in the mask. If the
    /// user does not provide a mask then all fields will be overwritten if new
    /// values are specified.
    #[prost(message, optional, tag = "1")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// Required. The Tensorboard's `name` field is used to identify the
    /// Tensorboard to be updated. Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`
    #[prost(message, optional, tag = "2")]
    pub tensorboard: ::core::option::Option<Tensorboard>,
}
/// Request message for \[TensorboardService.DeleteTensorboard][google.cloud.aiplatform.v1.TensorboardService.DeleteTensorboard\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteTensorboardRequest {
    /// Required. The name of the Tensorboard to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.CreateTensorboardExperiment][google.cloud.aiplatform.v1.TensorboardService.CreateTensorboardExperiment\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateTensorboardExperimentRequest {
    /// Required. The resource name of the Tensorboard to create the TensorboardExperiment
    /// in. Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// The TensorboardExperiment to create.
    #[prost(message, optional, tag = "2")]
    pub tensorboard_experiment: ::core::option::Option<TensorboardExperiment>,
    /// Required. The ID to use for the Tensorboard experiment, which will become the final
    /// component of the Tensorboard experiment's resource name.
    ///
    /// This value should be 1-128 characters, and valid characters
    /// are /\[a-z][0-9\]-/.
    #[prost(string, tag = "3")]
    pub tensorboard_experiment_id: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.GetTensorboardExperiment][google.cloud.aiplatform.v1.TensorboardService.GetTensorboardExperiment\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTensorboardExperimentRequest {
    /// Required. The name of the TensorboardExperiment resource.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.ListTensorboardExperiments][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardExperiments\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTensorboardExperimentsRequest {
    /// Required. The resource name of the Tensorboard to list TensorboardExperiments.
    /// Format:
    /// 'projects/{project}/locations/{location}/tensorboards/{tensorboard}'
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Lists the TensorboardExperiments that match the filter expression.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The maximum number of TensorboardExperiments to return. The service may
    /// return fewer than this value. If unspecified, at most 50
    /// TensorboardExperiments will be returned. The maximum value is 1000; values
    /// above 1000 will be coerced to 1000.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[TensorboardService.ListTensorboardExperiments][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardExperiments\] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[TensorboardService.ListTensorboardExperiments][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardExperiments\] must
    /// match the call that provided the page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Field to use to sort the list.
    #[prost(string, tag = "5")]
    pub order_by: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "6")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[TensorboardService.ListTensorboardExperiments][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardExperiments\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTensorboardExperimentsResponse {
    /// The TensorboardExperiments matching the request.
    #[prost(message, repeated, tag = "1")]
    pub tensorboard_experiments: ::prost::alloc::vec::Vec<TensorboardExperiment>,
    /// A token, which can be sent as
    /// \[ListTensorboardExperimentsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardExperimentsRequest.page_token\] to retrieve the next page.
    /// If this field is omitted, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.UpdateTensorboardExperiment][google.cloud.aiplatform.v1.TensorboardService.UpdateTensorboardExperiment\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateTensorboardExperimentRequest {
    /// Required. Field mask is used to specify the fields to be overwritten in the
    /// TensorboardExperiment resource by the update.
    /// The fields specified in the update_mask are relative to the resource, not
    /// the full request. A field will be overwritten if it is in the mask. If the
    /// user does not provide a mask then all fields will be overwritten if new
    /// values are specified.
    #[prost(message, optional, tag = "1")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// Required. The TensorboardExperiment's `name` field is used to identify the
    /// TensorboardExperiment to be updated. Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
    #[prost(message, optional, tag = "2")]
    pub tensorboard_experiment: ::core::option::Option<TensorboardExperiment>,
}
/// Request message for \[TensorboardService.DeleteTensorboardExperiment][google.cloud.aiplatform.v1.TensorboardService.DeleteTensorboardExperiment\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteTensorboardExperimentRequest {
    /// Required. The name of the TensorboardExperiment to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.BatchCreateTensorboardRuns][google.cloud.aiplatform.v1.TensorboardService.BatchCreateTensorboardRuns\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchCreateTensorboardRunsRequest {
    /// Required. The resource name of the TensorboardExperiment to create the
    /// TensorboardRuns in. Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
    /// The parent field in the CreateTensorboardRunRequest messages must match
    /// this field.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The request message specifying the TensorboardRuns to create.
    /// A maximum of 1000 TensorboardRuns can be created in a batch.
    #[prost(message, repeated, tag = "2")]
    pub requests: ::prost::alloc::vec::Vec<CreateTensorboardRunRequest>,
}
/// Response message for \[TensorboardService.BatchCreateTensorboardRuns][google.cloud.aiplatform.v1.TensorboardService.BatchCreateTensorboardRuns\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchCreateTensorboardRunsResponse {
    /// The created TensorboardRuns.
    #[prost(message, repeated, tag = "1")]
    pub tensorboard_runs: ::prost::alloc::vec::Vec<TensorboardRun>,
}
/// Request message for \[TensorboardService.CreateTensorboardRun][google.cloud.aiplatform.v1.TensorboardService.CreateTensorboardRun\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateTensorboardRunRequest {
    /// Required. The resource name of the TensorboardExperiment to create the TensorboardRun
    /// in. Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The TensorboardRun to create.
    #[prost(message, optional, tag = "2")]
    pub tensorboard_run: ::core::option::Option<TensorboardRun>,
    /// Required. The ID to use for the Tensorboard run, which will become the final
    /// component of the Tensorboard run's resource name.
    ///
    /// This value should be 1-128 characters, and valid characters
    /// are /\[a-z][0-9\]-/.
    #[prost(string, tag = "3")]
    pub tensorboard_run_id: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.GetTensorboardRun][google.cloud.aiplatform.v1.TensorboardService.GetTensorboardRun\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTensorboardRunRequest {
    /// Required. The name of the TensorboardRun resource.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.ReadTensorboardBlobData][google.cloud.aiplatform.v1.TensorboardService.ReadTensorboardBlobData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ReadTensorboardBlobDataRequest {
    /// Required. The resource name of the TensorboardTimeSeries to list Blobs.
    /// Format:
    /// 'projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}'
    #[prost(string, tag = "1")]
    pub time_series: ::prost::alloc::string::String,
    /// IDs of the blobs to read.
    #[prost(string, repeated, tag = "2")]
    pub blob_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Response message for \[TensorboardService.ReadTensorboardBlobData][google.cloud.aiplatform.v1.TensorboardService.ReadTensorboardBlobData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ReadTensorboardBlobDataResponse {
    /// Blob messages containing blob bytes.
    #[prost(message, repeated, tag = "1")]
    pub blobs: ::prost::alloc::vec::Vec<TensorboardBlob>,
}
/// Request message for \[TensorboardService.ListTensorboardRuns][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardRuns\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTensorboardRunsRequest {
    /// Required. The resource name of the TensorboardExperiment to list TensorboardRuns.
    /// Format:
    /// 'projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}'
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Lists the TensorboardRuns that match the filter expression.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The maximum number of TensorboardRuns to return. The service may return
    /// fewer than this value. If unspecified, at most 50 TensorboardRuns will be
    /// returned. The maximum value is 1000; values above 1000 will be coerced to
    /// 1000.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[TensorboardService.ListTensorboardRuns][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardRuns\] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[TensorboardService.ListTensorboardRuns][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardRuns\] must
    /// match the call that provided the page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Field to use to sort the list.
    #[prost(string, tag = "5")]
    pub order_by: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "6")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[TensorboardService.ListTensorboardRuns][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardRuns\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTensorboardRunsResponse {
    /// The TensorboardRuns matching the request.
    #[prost(message, repeated, tag = "1")]
    pub tensorboard_runs: ::prost::alloc::vec::Vec<TensorboardRun>,
    /// A token, which can be sent as \[ListTensorboardRunsRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardRunsRequest.page_token\] to
    /// retrieve the next page.
    /// If this field is omitted, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.UpdateTensorboardRun][google.cloud.aiplatform.v1.TensorboardService.UpdateTensorboardRun\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateTensorboardRunRequest {
    /// Required. Field mask is used to specify the fields to be overwritten in the
    /// TensorboardRun resource by the update.
    /// The fields specified in the update_mask are relative to the resource, not
    /// the full request. A field will be overwritten if it is in the mask. If the
    /// user does not provide a mask then all fields will be overwritten if new
    /// values are specified.
    #[prost(message, optional, tag = "1")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// Required. The TensorboardRun's `name` field is used to identify the TensorboardRun to
    /// be updated. Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}`
    #[prost(message, optional, tag = "2")]
    pub tensorboard_run: ::core::option::Option<TensorboardRun>,
}
/// Request message for \[TensorboardService.DeleteTensorboardRun][google.cloud.aiplatform.v1.TensorboardService.DeleteTensorboardRun\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteTensorboardRunRequest {
    /// Required. The name of the TensorboardRun to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.BatchCreateTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.BatchCreateTensorboardTimeSeries\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchCreateTensorboardTimeSeriesRequest {
    /// Required. The resource name of the TensorboardExperiment to create the
    /// TensorboardTimeSeries in.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
    /// The TensorboardRuns referenced by the parent fields in the
    /// CreateTensorboardTimeSeriesRequest messages must be sub resources of this
    /// TensorboardExperiment.
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The request message specifying the TensorboardTimeSeries to create.
    /// A maximum of 1000 TensorboardTimeSeries can be created in a batch.
    #[prost(message, repeated, tag = "2")]
    pub requests: ::prost::alloc::vec::Vec<CreateTensorboardTimeSeriesRequest>,
}
/// Response message for \[TensorboardService.BatchCreateTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.BatchCreateTensorboardTimeSeries\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchCreateTensorboardTimeSeriesResponse {
    /// The created TensorboardTimeSeries.
    #[prost(message, repeated, tag = "1")]
    pub tensorboard_time_series: ::prost::alloc::vec::Vec<TensorboardTimeSeries>,
}
/// Request message for \[TensorboardService.CreateTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.CreateTensorboardTimeSeries\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateTensorboardTimeSeriesRequest {
    /// Required. The resource name of the TensorboardRun to create the
    /// TensorboardTimeSeries in.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Optional. The user specified unique ID to use for the TensorboardTimeSeries, which
    /// will become the final component of the TensorboardTimeSeries's resource
    /// name.
    /// This value should match "\[a-z0-9][a-z0-9-\]{0, 127}"
    #[prost(string, tag = "3")]
    pub tensorboard_time_series_id: ::prost::alloc::string::String,
    /// Required. The TensorboardTimeSeries to create.
    #[prost(message, optional, tag = "2")]
    pub tensorboard_time_series: ::core::option::Option<TensorboardTimeSeries>,
}
/// Request message for \[TensorboardService.GetTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.GetTensorboardTimeSeries\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTensorboardTimeSeriesRequest {
    /// Required. The name of the TensorboardTimeSeries resource.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.ListTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardTimeSeries\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTensorboardTimeSeriesRequest {
    /// Required. The resource name of the TensorboardRun to list TensorboardTimeSeries.
    /// Format:
    /// 'projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}'
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Lists the TensorboardTimeSeries that match the filter expression.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The maximum number of TensorboardTimeSeries to return. The service may
    /// return fewer than this value. If unspecified, at most 50
    /// TensorboardTimeSeries will be returned. The maximum value is 1000; values
    /// above 1000 will be coerced to 1000.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[TensorboardService.ListTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardTimeSeries\] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[TensorboardService.ListTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardTimeSeries\] must
    /// match the call that provided the page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Field to use to sort the list.
    #[prost(string, tag = "5")]
    pub order_by: ::prost::alloc::string::String,
    /// Mask specifying which fields to read.
    #[prost(message, optional, tag = "6")]
    pub read_mask: ::core::option::Option<::prost_types::FieldMask>,
}
/// Response message for \[TensorboardService.ListTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.ListTensorboardTimeSeries\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTensorboardTimeSeriesResponse {
    /// The TensorboardTimeSeries matching the request.
    #[prost(message, repeated, tag = "1")]
    pub tensorboard_time_series: ::prost::alloc::vec::Vec<TensorboardTimeSeries>,
    /// A token, which can be sent as
    /// \[ListTensorboardTimeSeriesRequest.page_token][google.cloud.aiplatform.v1.ListTensorboardTimeSeriesRequest.page_token\] to retrieve the next page.
    /// If this field is omitted, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[TensorboardService.UpdateTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.UpdateTensorboardTimeSeries\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateTensorboardTimeSeriesRequest {
    /// Required. Field mask is used to specify the fields to be overwritten in the
    /// TensorboardTimeSeries resource by the update.
    /// The fields specified in the update_mask are relative to the resource, not
    /// the full request. A field will be overwritten if it is in the mask. If the
    /// user does not provide a mask then all fields will be overwritten if new
    /// values are specified.
    #[prost(message, optional, tag = "1")]
    pub update_mask: ::core::option::Option<::prost_types::FieldMask>,
    /// Required. The TensorboardTimeSeries' `name` field is used to identify the
    /// TensorboardTimeSeries to be updated.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
    #[prost(message, optional, tag = "2")]
    pub tensorboard_time_series: ::core::option::Option<TensorboardTimeSeries>,
}
/// Request message for \[TensorboardService.DeleteTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.DeleteTensorboardTimeSeries\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteTensorboardTimeSeriesRequest {
    /// Required. The name of the TensorboardTimeSeries to be deleted.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for
/// \[TensorboardService.BatchReadTensorboardTimeSeriesData][google.cloud.aiplatform.v1.TensorboardService.BatchReadTensorboardTimeSeriesData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchReadTensorboardTimeSeriesDataRequest {
    /// Required. The resource name of the Tensorboard containing TensorboardTimeSeries to
    /// read data from. Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`.
    /// The TensorboardTimeSeries referenced by \[time_series][google.cloud.aiplatform.v1.BatchReadTensorboardTimeSeriesDataRequest.time_series\] must be sub
    /// resources of this Tensorboard.
    #[prost(string, tag = "1")]
    pub tensorboard: ::prost::alloc::string::String,
    /// Required. The resource names of the TensorboardTimeSeries to read data from. Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
    #[prost(string, repeated, tag = "2")]
    pub time_series: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Response message for
/// \[TensorboardService.BatchReadTensorboardTimeSeriesData][google.cloud.aiplatform.v1.TensorboardService.BatchReadTensorboardTimeSeriesData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchReadTensorboardTimeSeriesDataResponse {
    /// The returned time series data.
    #[prost(message, repeated, tag = "1")]
    pub time_series_data: ::prost::alloc::vec::Vec<TimeSeriesData>,
}
/// Request message for \[TensorboardService.ReadTensorboardTimeSeriesData][google.cloud.aiplatform.v1.TensorboardService.ReadTensorboardTimeSeriesData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ReadTensorboardTimeSeriesDataRequest {
    /// Required. The resource name of the TensorboardTimeSeries to read data from.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
    #[prost(string, tag = "1")]
    pub tensorboard_time_series: ::prost::alloc::string::String,
    /// The maximum number of data points of the TensorboardTimeSeries to return.
    ///
    /// This value should be a positive integer.
    /// This value can be set to -1 to return all data.
    #[prost(int32, tag = "2")]
    pub max_data_points: i32,
    /// Reads the TensorboardTimeSeries' data that match the filter expression.
    #[prost(string, tag = "3")]
    pub filter: ::prost::alloc::string::String,
}
/// Response message for \[TensorboardService.ReadTensorboardTimeSeriesData][google.cloud.aiplatform.v1.TensorboardService.ReadTensorboardTimeSeriesData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ReadTensorboardTimeSeriesDataResponse {
    /// The returned time series data.
    #[prost(message, optional, tag = "1")]
    pub time_series_data: ::core::option::Option<TimeSeriesData>,
}
/// Request message for \[TensorboardService.WriteTensorboardExperimentData][google.cloud.aiplatform.v1.TensorboardService.WriteTensorboardExperimentData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct WriteTensorboardExperimentDataRequest {
/// Required. The resource name of the TensorboardExperiment to write data to.
/// Format:
/// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
#[prost(string, tag = "1")]
pub tensorboard_experiment: ::prost::alloc::string::String,
/// Required. Requests containing per-run TensorboardTimeSeries data to write.
#[prost(message, repeated, tag = "2")]
pub write_run_data_requests: ::prost::alloc::vec::Vec<WriteTensorboardRunDataRequest>,
}
/// Response message for \[TensorboardService.WriteTensorboardExperimentData][google.cloud.aiplatform.v1.TensorboardService.WriteTensorboardExperimentData\].
///
/// Intentionally empty: success is conveyed by the gRPC status alone.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct WriteTensorboardExperimentDataResponse {}
/// Request message for \[TensorboardService.WriteTensorboardRunData][google.cloud.aiplatform.v1.TensorboardService.WriteTensorboardRunData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct WriteTensorboardRunDataRequest {
    /// Required. The resource name of the TensorboardRun to write data to.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}`
    #[prost(string, tag = "1")]
    pub tensorboard_run: ::prost::alloc::string::String,
    /// Required. The TensorboardTimeSeries data to write.
    /// Values within a time series are indexed by their step value.
    /// Repeated writes to the same step will overwrite the existing value for that
    /// step.
    /// The upper limit of data points per write request is 5000.
    #[prost(message, repeated, tag = "2")]
    pub time_series_data: ::prost::alloc::vec::Vec<TimeSeriesData>,
}
/// Response message for \[TensorboardService.WriteTensorboardRunData][google.cloud.aiplatform.v1.TensorboardService.WriteTensorboardRunData\].
///
/// Intentionally empty: success is conveyed by the gRPC status alone.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct WriteTensorboardRunDataResponse {}
/// Request message for \[TensorboardService.ExportTensorboardTimeSeriesData][google.cloud.aiplatform.v1.TensorboardService.ExportTensorboardTimeSeriesData\].
///
/// Uses standard AIP-158 pagination (`page_size` / `page_token`).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportTensorboardTimeSeriesDataRequest {
    /// Required. The resource name of the TensorboardTimeSeries to export data from.
    /// Format:
    /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
    #[prost(string, tag = "1")]
    pub tensorboard_time_series: ::prost::alloc::string::String,
    /// Exports only the TensorboardTimeSeries' data that match the filter expression.
    #[prost(string, tag = "2")]
    pub filter: ::prost::alloc::string::String,
    /// The maximum number of data points to return per page.
    /// The default page_size will be 1000. Values must be between 1 and 10000.
    /// Values above 10000 will be coerced to 10000.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
    /// A page token, received from a previous
    /// \[TensorboardService.ExportTensorboardTimeSeries][\] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// \[TensorboardService.ExportTensorboardTimeSeries][\] must
    /// match the call that provided the page token.
    #[prost(string, tag = "4")]
    pub page_token: ::prost::alloc::string::String,
    /// Field to use to sort the TensorboardTimeSeries' data.
    /// By default, TensorboardTimeSeries' data will be returned in a pseudo random
    /// order.
    #[prost(string, tag = "5")]
    pub order_by: ::prost::alloc::string::String,
}
/// Response message for \[TensorboardService.ExportTensorboardTimeSeriesData][google.cloud.aiplatform.v1.TensorboardService.ExportTensorboardTimeSeriesData\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExportTensorboardTimeSeriesDataResponse {
    /// The returned time series data points.
    #[prost(message, repeated, tag = "1")]
    pub time_series_data_points: ::prost::alloc::vec::Vec<TimeSeriesDataPoint>,
    /// A token, which can be sent as
    /// \[ExportTensorboardTimeSeriesRequest.page_token][\] to retrieve the next
    /// page. If this field is omitted (empty), there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Details of operations that perform create Tensorboard.
///
/// Attached as long-running-operation metadata for `CreateTensorboard`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateTensorboardOperationMetadata {
    /// Operation metadata for Tensorboard.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
/// Details of operations that perform update Tensorboard.
///
/// Attached as long-running-operation metadata for `UpdateTensorboard`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateTensorboardOperationMetadata {
    /// Operation metadata for Tensorboard.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
}
// NOTE(review): `tonic-build`-generated gRPC client for
// `google.cloud.aiplatform.v1.TensorboardService`. Regenerate from the proto
// instead of editing by hand; every method below follows the same generated
// pattern (ready-check, ProstCodec, static request path).
#[doc = r" Generated client implementations."]
pub mod tensorboard_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    #[doc = " TensorboardService"]
    #[derive(Debug, Clone)]
    pub struct TensorboardServiceClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl<T> TensorboardServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        // Wraps an arbitrary HTTP/2 transport/service in tonic's gRPC plumbing.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Builds a client whose every request first passes through `interceptor`
        // (typically used to attach auth headers).
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> TensorboardServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            TensorboardServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        // Each unary RPC below: wait for the transport to become ready, then
        // issue a ProstCodec-encoded request to the service's static URI path.
        #[doc = " Creates a Tensorboard."]
        pub async fn create_tensorboard(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateTensorboardRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboard",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a Tensorboard."]
        pub async fn get_tensorboard(
            &mut self,
            request: impl tonic::IntoRequest<super::GetTensorboardRequest>,
        ) -> Result<tonic::Response<super::Tensorboard>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboard",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates a Tensorboard."]
        pub async fn update_tensorboard(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateTensorboardRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboard",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists Tensorboards in a Location."]
        pub async fn list_tensorboards(
            &mut self,
            request: impl tonic::IntoRequest<super::ListTensorboardsRequest>,
        ) -> Result<tonic::Response<super::ListTensorboardsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboards",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a Tensorboard."]
        pub async fn delete_tensorboard(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteTensorboardRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboard",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a TensorboardExperiment."]
        pub async fn create_tensorboard_experiment(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateTensorboardExperimentRequest>,
        ) -> Result<tonic::Response<super::TensorboardExperiment>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardExperiment",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a TensorboardExperiment."]
        pub async fn get_tensorboard_experiment(
            &mut self,
            request: impl tonic::IntoRequest<super::GetTensorboardExperimentRequest>,
        ) -> Result<tonic::Response<super::TensorboardExperiment>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardExperiment",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates a TensorboardExperiment."]
        pub async fn update_tensorboard_experiment(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateTensorboardExperimentRequest>,
        ) -> Result<tonic::Response<super::TensorboardExperiment>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardExperiment",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists TensorboardExperiments in a Location."]
        pub async fn list_tensorboard_experiments(
            &mut self,
            request: impl tonic::IntoRequest<super::ListTensorboardExperimentsRequest>,
        ) -> Result<tonic::Response<super::ListTensorboardExperimentsResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardExperiments",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a TensorboardExperiment."]
        pub async fn delete_tensorboard_experiment(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteTensorboardExperimentRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardExperiment",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a TensorboardRun."]
        pub async fn create_tensorboard_run(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateTensorboardRunRequest>,
        ) -> Result<tonic::Response<super::TensorboardRun>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardRun",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Batch create TensorboardRuns."]
        pub async fn batch_create_tensorboard_runs(
            &mut self,
            request: impl tonic::IntoRequest<super::BatchCreateTensorboardRunsRequest>,
        ) -> Result<tonic::Response<super::BatchCreateTensorboardRunsResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardRuns",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a TensorboardRun."]
        pub async fn get_tensorboard_run(
            &mut self,
            request: impl tonic::IntoRequest<super::GetTensorboardRunRequest>,
        ) -> Result<tonic::Response<super::TensorboardRun>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardRun",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates a TensorboardRun."]
        pub async fn update_tensorboard_run(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateTensorboardRunRequest>,
        ) -> Result<tonic::Response<super::TensorboardRun>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardRun",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists TensorboardRuns in a Location."]
        pub async fn list_tensorboard_runs(
            &mut self,
            request: impl tonic::IntoRequest<super::ListTensorboardRunsRequest>,
        ) -> Result<tonic::Response<super::ListTensorboardRunsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardRuns",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a TensorboardRun."]
        pub async fn delete_tensorboard_run(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteTensorboardRunRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardRun",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Batch create TensorboardTimeSeries that belong to a TensorboardExperiment."]
        pub async fn batch_create_tensorboard_time_series(
            &mut self,
            request: impl tonic::IntoRequest<super::BatchCreateTensorboardTimeSeriesRequest>,
        ) -> Result<tonic::Response<super::BatchCreateTensorboardTimeSeriesResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/BatchCreateTensorboardTimeSeries",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a TensorboardTimeSeries."]
        pub async fn create_tensorboard_time_series(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateTensorboardTimeSeriesRequest>,
        ) -> Result<tonic::Response<super::TensorboardTimeSeries>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/CreateTensorboardTimeSeries",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a TensorboardTimeSeries."]
        pub async fn get_tensorboard_time_series(
            &mut self,
            request: impl tonic::IntoRequest<super::GetTensorboardTimeSeriesRequest>,
        ) -> Result<tonic::Response<super::TensorboardTimeSeries>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/GetTensorboardTimeSeries",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates a TensorboardTimeSeries."]
        pub async fn update_tensorboard_time_series(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateTensorboardTimeSeriesRequest>,
        ) -> Result<tonic::Response<super::TensorboardTimeSeries>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/UpdateTensorboardTimeSeries",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists TensorboardTimeSeries in a Location."]
        pub async fn list_tensorboard_time_series(
            &mut self,
            request: impl tonic::IntoRequest<super::ListTensorboardTimeSeriesRequest>,
        ) -> Result<tonic::Response<super::ListTensorboardTimeSeriesResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/ListTensorboardTimeSeries",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a TensorboardTimeSeries."]
        pub async fn delete_tensorboard_time_series(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteTensorboardTimeSeriesRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/DeleteTensorboardTimeSeries",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Reads multiple TensorboardTimeSeries' data. The data point number limit is"]
        #[doc = " 1000 for scalars, 100 for tensors and blob references. If the number of"]
        #[doc = " data points stored is less than the limit, all data will be returned."]
        #[doc = " Otherwise, that limit number of data points will be randomly selected from"]
        #[doc = " this time series and returned."]
        pub async fn batch_read_tensorboard_time_series_data(
            &mut self,
            request: impl tonic::IntoRequest<super::BatchReadTensorboardTimeSeriesDataRequest>,
        ) -> Result<tonic::Response<super::BatchReadTensorboardTimeSeriesDataResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/BatchReadTensorboardTimeSeriesData",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Reads a TensorboardTimeSeries' data. By default, if the number of data"]
        #[doc = " points stored is less than 1000, all data will be returned. Otherwise, 1000"]
        #[doc = " data points will be randomly selected from this time series and returned."]
        #[doc = " This value can be changed by changing max_data_points, which can't be"]
        #[doc = " greater than 10k."]
        pub async fn read_tensorboard_time_series_data(
            &mut self,
            request: impl tonic::IntoRequest<super::ReadTensorboardTimeSeriesDataRequest>,
        ) -> Result<tonic::Response<super::ReadTensorboardTimeSeriesDataResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardTimeSeriesData",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        // Server-streaming RPC: the response body is a stream of chunks rather
        // than a single message, hence `server_streaming` instead of `unary`.
        #[doc = " Gets bytes of TensorboardBlobs."]
        #[doc = " This is to allow reading blob data stored in consumer project's Cloud"]
        #[doc = " Storage bucket without users having to obtain Cloud Storage access"]
        #[doc = " permission."]
        pub async fn read_tensorboard_blob_data(
            &mut self,
            request: impl tonic::IntoRequest<super::ReadTensorboardBlobDataRequest>,
        ) -> Result<
            tonic::Response<tonic::codec::Streaming<super::ReadTensorboardBlobDataResponse>>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/ReadTensorboardBlobData",
            );
            self.inner.server_streaming(request.into_request(), path, codec).await
        }
        #[doc = " Write time series data points of multiple TensorboardTimeSeries in multiple"]
        #[doc = " TensorboardRun's. If any data fail to be ingested, an error will be"]
        #[doc = " returned."]
        pub async fn write_tensorboard_experiment_data(
            &mut self,
            request: impl tonic::IntoRequest<super::WriteTensorboardExperimentDataRequest>,
        ) -> Result<tonic::Response<super::WriteTensorboardExperimentDataResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardExperimentData",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Write time series data points into multiple TensorboardTimeSeries under"]
        #[doc = " a TensorboardRun. If any data fail to be ingested, an error will be"]
        #[doc = " returned."]
        pub async fn write_tensorboard_run_data(
            &mut self,
            request: impl tonic::IntoRequest<super::WriteTensorboardRunDataRequest>,
        ) -> Result<tonic::Response<super::WriteTensorboardRunDataResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/WriteTensorboardRunData",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Exports a TensorboardTimeSeries' data. Data is returned in paginated"]
        #[doc = " responses."]
        pub async fn export_tensorboard_time_series_data(
            &mut self,
            request: impl tonic::IntoRequest<super::ExportTensorboardTimeSeriesDataRequest>,
        ) -> Result<tonic::Response<super::ExportTensorboardTimeSeriesDataResponse>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.TensorboardService/ExportTensorboardTimeSeriesData",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
/// Request message for \[VizierService.GetStudy][google.cloud.aiplatform.v1.VizierService.GetStudy\].
///
/// Prost-generated message; field tags mirror the proto definition — do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetStudyRequest {
    /// Required. The name of the Study resource.
    /// Format: `projects/{project}/locations/{location}/studies/{study}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.CreateStudy][google.cloud.aiplatform.v1.VizierService.CreateStudy\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateStudyRequest {
    /// Required. The resource name of the Location to create the CustomJob in.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Study configuration used to create the Study.
    /// Optional in the Rust type only because proto3 messages are always optional.
    #[prost(message, optional, tag = "2")]
    pub study: ::core::option::Option<Study>,
}
/// Request message for \[VizierService.ListStudies][google.cloud.aiplatform.v1.VizierService.ListStudies\].
///
/// Paginated list request (`page_size` / `page_token`).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListStudiesRequest {
    /// Required. The resource name of the Location to list the Study from.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Optional. A page token to request the next page of results.
    /// If unspecified, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub page_token: ::prost::alloc::string::String,
    /// Optional. The maximum number of studies to return per "page" of results.
    /// If unspecified, service will pick an appropriate default.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
}
/// Response message for \[VizierService.ListStudies][google.cloud.aiplatform.v1.VizierService.ListStudies\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListStudiesResponse {
    /// The studies associated with the project.
    #[prost(message, repeated, tag = "1")]
    pub studies: ::prost::alloc::vec::Vec<Study>,
    /// Passes this token as the `page_token` field of the request for a
    /// subsequent call.
    /// If this field is omitted (empty), there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.DeleteStudy][google.cloud.aiplatform.v1.VizierService.DeleteStudy\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteStudyRequest {
    /// Required. The name of the Study resource to be deleted.
    /// Format: `projects/{project}/locations/{location}/studies/{study}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.LookupStudy][google.cloud.aiplatform.v1.VizierService.LookupStudy\].
///
/// Looks a Study up by display name rather than by resource name.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct LookupStudyRequest {
    /// Required. The resource name of the Location to get the Study from.
    /// Format: `projects/{project}/locations/{location}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The user-defined display name of the Study
    #[prost(string, tag = "2")]
    pub display_name: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.SuggestTrials][google.cloud.aiplatform.v1.VizierService.SuggestTrials\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SuggestTrialsRequest {
    /// Required. The project and location that the Study belongs to.
    /// Format: `projects/{project}/locations/{location}/studies/{study}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The number of suggestions requested.
    #[prost(int32, tag = "2")]
    pub suggestion_count: i32,
    /// Required. The identifier of the client that is requesting the suggestion.
    ///
    /// If multiple SuggestTrialsRequests have the same `client_id`,
    /// the service will return the identical suggested Trial if the Trial is
    /// pending, and provide a new Trial if the last suggested Trial was completed.
    #[prost(string, tag = "3")]
    pub client_id: ::prost::alloc::string::String,
}
/// Response message for \[VizierService.SuggestTrials][google.cloud.aiplatform.v1.VizierService.SuggestTrials\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SuggestTrialsResponse {
    /// A list of Trials.
    #[prost(message, repeated, tag = "1")]
    pub trials: ::prost::alloc::vec::Vec<Trial>,
    /// The state of the Study.
    /// Stored as the raw enum discriminant (`i32`), as prost does for open enums;
    /// interpret via `study::State`.
    #[prost(enumeration = "study::State", tag = "2")]
    pub study_state: i32,
    /// The time at which the operation was started.
    #[prost(message, optional, tag = "3")]
    pub start_time: ::core::option::Option<::prost_types::Timestamp>,
    /// The time at which operation processing completed.
    #[prost(message, optional, tag = "4")]
    pub end_time: ::core::option::Option<::prost_types::Timestamp>,
}
/// Details of operations that perform Trials suggestion.
///
/// Attached as long-running-operation metadata for `SuggestTrials`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SuggestTrialsMetadata {
    /// Operation metadata for suggesting Trials.
    #[prost(message, optional, tag = "1")]
    pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
    /// The identifier of the client that is requesting the suggestion.
    ///
    /// If multiple SuggestTrialsRequests have the same `client_id`,
    /// the service will return the identical suggested Trial if the Trial is
    /// pending, and provide a new Trial if the last suggested Trial was completed.
    #[prost(string, tag = "2")]
    pub client_id: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.CreateTrial][google.cloud.aiplatform.v1.VizierService.CreateTrial\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateTrialRequest {
    /// Required. The resource name of the Study to create the Trial in.
    /// Format: `projects/{project}/locations/{location}/studies/{study}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Required. The Trial to create.
    /// Optional in the Rust type only because proto3 messages are always optional.
    #[prost(message, optional, tag = "2")]
    pub trial: ::core::option::Option<Trial>,
}
/// Request message for \[VizierService.GetTrial][google.cloud.aiplatform.v1.VizierService.GetTrial\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetTrialRequest {
    /// Required. The name of the Trial resource.
    /// Format:
    /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}`
    #[prost(string, tag = "1")]
    pub name: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.ListTrials][google.cloud.aiplatform.v1.VizierService.ListTrials\].
///
/// Paginated list request (`page_size` / `page_token`).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTrialsRequest {
    /// Required. The resource name of the Study to list the Trial from.
    /// Format: `projects/{project}/locations/{location}/studies/{study}`
    #[prost(string, tag = "1")]
    pub parent: ::prost::alloc::string::String,
    /// Optional. A page token to request the next page of results.
    /// If unspecified, there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub page_token: ::prost::alloc::string::String,
    /// Optional. The number of Trials to retrieve per "page" of results.
    /// If unspecified, the service will pick an appropriate default.
    #[prost(int32, tag = "3")]
    pub page_size: i32,
}
/// Response message for \[VizierService.ListTrials][google.cloud.aiplatform.v1.VizierService.ListTrials\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTrialsResponse {
    /// The Trials associated with the Study.
    #[prost(message, repeated, tag = "1")]
    pub trials: ::prost::alloc::vec::Vec<Trial>,
    /// Pass this token as the `page_token` field of the request for a
    /// subsequent call.
    /// If this field is omitted (empty), there are no subsequent pages.
    #[prost(string, tag = "2")]
    pub next_page_token: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.AddTrialMeasurement][google.cloud.aiplatform.v1.VizierService.AddTrialMeasurement\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddTrialMeasurementRequest {
/// Required. The name of the trial to add measurement.
/// Format:
/// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}`
#[prost(string, tag = "1")]
pub trial_name: ::prost::alloc::string::String,
/// Required. The measurement to be added to a Trial.
#[prost(message, optional, tag = "3")]
pub measurement: ::core::option::Option<Measurement>,
}
/// Request message for \[VizierService.CompleteTrial][google.cloud.aiplatform.v1.VizierService.CompleteTrial\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CompleteTrialRequest {
/// Required. The Trial's name.
/// Format:
/// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}`
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
/// Optional. If provided, it will be used as the completed Trial's
/// final_measurement; Otherwise, the service will auto-select a
/// previously reported measurement as the final-measurement
#[prost(message, optional, tag = "2")]
pub final_measurement: ::core::option::Option<Measurement>,
/// Optional. True if the Trial cannot be run with the given Parameter, and
/// final_measurement will be ignored.
#[prost(bool, tag = "3")]
pub trial_infeasible: bool,
/// Optional. A human readable reason why the trial was infeasible. This should
/// only be provided if `trial_infeasible` is true.
#[prost(string, tag = "4")]
pub infeasible_reason: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.DeleteTrial][google.cloud.aiplatform.v1.VizierService.DeleteTrial\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteTrialRequest {
/// Required. The Trial's name.
/// Format:
/// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}`
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.CheckTrialEarlyStoppingState][google.cloud.aiplatform.v1.VizierService.CheckTrialEarlyStoppingState\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CheckTrialEarlyStoppingStateRequest {
/// Required. The Trial's name.
/// Format:
/// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}`
#[prost(string, tag = "1")]
pub trial_name: ::prost::alloc::string::String,
}
/// Response message for \[VizierService.CheckTrialEarlyStoppingState][google.cloud.aiplatform.v1.VizierService.CheckTrialEarlyStoppingState\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CheckTrialEarlyStoppingStateResponse {
/// True if the Trial should stop.
#[prost(bool, tag = "1")]
pub should_stop: bool,
}
/// This message will be placed in the metadata field of a
/// google.longrunning.Operation associated with a CheckTrialEarlyStoppingState
/// request.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CheckTrialEarlyStoppingStateMetatdata {
/// Operation metadata for suggesting Trials.
#[prost(message, optional, tag = "1")]
pub generic_metadata: ::core::option::Option<GenericOperationMetadata>,
/// The name of the Study that the Trial belongs to.
#[prost(string, tag = "2")]
pub study: ::prost::alloc::string::String,
/// The Trial name.
#[prost(string, tag = "3")]
pub trial: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.StopTrial][google.cloud.aiplatform.v1.VizierService.StopTrial\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct StopTrialRequest {
/// Required. The Trial's name.
/// Format:
/// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}`
#[prost(string, tag = "1")]
pub name: ::prost::alloc::string::String,
}
/// Request message for \[VizierService.ListOptimalTrials][google.cloud.aiplatform.v1.VizierService.ListOptimalTrials\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListOptimalTrialsRequest {
/// Required. The name of the Study that the optimal Trial belongs to.
#[prost(string, tag = "1")]
pub parent: ::prost::alloc::string::String,
}
/// Response message for \[VizierService.ListOptimalTrials][google.cloud.aiplatform.v1.VizierService.ListOptimalTrials\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListOptimalTrialsResponse {
/// The pareto-optimal Trials for multiple objective Study or the
/// optimal trial for single objective Study. The definition of
/// pareto-optimal can be checked in wiki page.
/// <https://en.wikipedia.org/wiki/Pareto_efficiency>
#[prost(message, repeated, tag = "1")]
pub optimal_trials: ::prost::alloc::vec::Vec<Trial>,
}
#[doc = r" Generated client implementations."]
pub mod vizier_service_client {
    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
    use tonic::codegen::*;
    // NOTE: tonic-generated gRPC client for
    // google.cloud.aiplatform.v1.VizierService; regenerate from the protos
    // rather than editing by hand.
    #[doc = " Vertex Vizier API."]
    #[doc = ""]
    #[doc = " Vizier service is a GCP service to solve blackbox optimization problems,"]
    #[doc = " such as tuning machine learning hyperparameters and searching over deep"]
    #[doc = " learning architectures."]
    #[derive(Debug, Clone)]
    pub struct VizierServiceClient<T> {
        inner: tonic::client::Grpc<T>,
    }
    impl<T> VizierServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
    {
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        pub fn with_interceptor<F>(
            inner: T,
            interceptor: F,
        ) -> VizierServiceClient<InterceptedService<T, F>>
        where
            F: tonic::service::Interceptor,
            T: tonic::codegen::Service<
                http::Request<tonic::body::BoxBody>,
                Response = http::Response<
                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
                >,
            >,
            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
                Into<StdError> + Send + Sync,
        {
            VizierServiceClient::new(InterceptedService::new(inner, interceptor))
        }
        #[doc = r" Compress requests with `gzip`."]
        #[doc = r""]
        #[doc = r" This requires the server to support it otherwise it might respond with an"]
        #[doc = r" error."]
        pub fn send_gzip(mut self) -> Self {
            self.inner = self.inner.send_gzip();
            self
        }
        #[doc = r" Enable decompressing responses with `gzip`."]
        pub fn accept_gzip(mut self) -> Self {
            self.inner = self.inner.accept_gzip();
            self
        }
        #[doc = " Creates a Study. A resource name will be generated after creation of the"]
        #[doc = " Study."]
        pub async fn create_study(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateStudyRequest>,
        ) -> Result<tonic::Response<super::Study>, tonic::Status> {
            // Every unary RPC below follows the same shape: wait for the
            // underlying service to become ready, then issue the request with
            // a prost codec against the method's static gRPC path.
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/CreateStudy",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a Study by name."]
        pub async fn get_study(
            &mut self,
            request: impl tonic::IntoRequest<super::GetStudyRequest>,
        ) -> Result<tonic::Response<super::Study>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/GetStudy",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists all the studies in a region for an associated project."]
        pub async fn list_studies(
            &mut self,
            request: impl tonic::IntoRequest<super::ListStudiesRequest>,
        ) -> Result<tonic::Response<super::ListStudiesResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/ListStudies",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a Study."]
        pub async fn delete_study(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteStudyRequest>,
        ) -> Result<tonic::Response<()>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/DeleteStudy",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Looks a study up using the user-defined display_name field instead of the"]
        #[doc = " fully qualified resource name."]
        pub async fn lookup_study(
            &mut self,
            request: impl tonic::IntoRequest<super::LookupStudyRequest>,
        ) -> Result<tonic::Response<super::Study>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/LookupStudy",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Adds one or more Trials to a Study, with parameter values"]
        #[doc = " suggested by Vertex Vizier. Returns a long-running"]
        #[doc = " operation associated with the generation of Trial suggestions."]
        #[doc = " When this long-running operation succeeds, it will contain"]
        #[doc = " a [SuggestTrialsResponse][google.cloud.ml.v1.SuggestTrialsResponse]."]
        pub async fn suggest_trials(
            &mut self,
            request: impl tonic::IntoRequest<super::SuggestTrialsRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/SuggestTrials",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Adds a user provided Trial to a Study."]
        pub async fn create_trial(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateTrialRequest>,
        ) -> Result<tonic::Response<super::Trial>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/CreateTrial",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets a Trial."]
        pub async fn get_trial(
            &mut self,
            request: impl tonic::IntoRequest<super::GetTrialRequest>,
        ) -> Result<tonic::Response<super::Trial>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/GetTrial",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists the Trials associated with a Study."]
        pub async fn list_trials(
            &mut self,
            request: impl tonic::IntoRequest<super::ListTrialsRequest>,
        ) -> Result<tonic::Response<super::ListTrialsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/ListTrials",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Adds a measurement of the objective metrics to a Trial. This measurement"]
        #[doc = " is assumed to have been taken before the Trial is complete."]
        pub async fn add_trial_measurement(
            &mut self,
            request: impl tonic::IntoRequest<super::AddTrialMeasurementRequest>,
        ) -> Result<tonic::Response<super::Trial>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/AddTrialMeasurement",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Marks a Trial as complete."]
        pub async fn complete_trial(
            &mut self,
            request: impl tonic::IntoRequest<super::CompleteTrialRequest>,
        ) -> Result<tonic::Response<super::Trial>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/CompleteTrial",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a Trial."]
        pub async fn delete_trial(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteTrialRequest>,
        ) -> Result<tonic::Response<()>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/DeleteTrial",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Checks whether a Trial should stop or not. Returns a"]
        #[doc = " long-running operation. When the operation is successful,"]
        #[doc = " it will contain a"]
        #[doc = " [CheckTrialEarlyStoppingStateResponse][google.cloud.ml.v1.CheckTrialEarlyStoppingStateResponse]."]
        pub async fn check_trial_early_stopping_state(
            &mut self,
            request: impl tonic::IntoRequest<super::CheckTrialEarlyStoppingStateRequest>,
        ) -> Result<
            tonic::Response<super::super::super::super::longrunning::Operation>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/CheckTrialEarlyStoppingState",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Stops a Trial."]
        pub async fn stop_trial(
            &mut self,
            request: impl tonic::IntoRequest<super::StopTrialRequest>,
        ) -> Result<tonic::Response<super::Trial>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/StopTrial",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists the pareto-optimal Trials for multi-objective Study or the"]
        #[doc = " optimal Trials for single-objective Study. The definition of"]
        #[doc = " pareto-optimal can be checked in wiki page."]
        #[doc = " https://en.wikipedia.org/wiki/Pareto_efficiency"]
        pub async fn list_optimal_trials(
            &mut self,
            request: impl tonic::IntoRequest<super::ListOptimalTrialsRequest>,
        ) -> Result<tonic::Response<super::ListOptimalTrialsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.aiplatform.v1.VizierService/ListOptimalTrials",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
}
| 51.503065 | 202 | 0.647798 |
fcdff59ffa984cd9b9b58754248715aabd437891 | 2,823 | // run-pass
// Checks if the correct registers are being used to pass arguments
// when the sysv64 ABI is specified.
// ignore-android
// ignore-arm
// ignore-aarch64
#![feature(llvm_asm)]
// Callee side of the sysv64 register test: verifies that each argument
// arrived in the register the System V AMD64 ABI assigns to it — integers in
// rdi, rsi, rdx, rcx, r8, r9 and floats in xmm0..xmm7. Returns a sentinel the
// caller checks.
#[cfg(target_arch = "x86_64")]
pub extern "sysv64" fn all_the_registers(rdi: i64, rsi: i64, rdx: i64,
                                         rcx: i64, r8: i64, r9: i64,
                                         xmm0: f32, xmm1: f32, xmm2: f32,
                                         xmm3: f32, xmm4: f32, xmm5: f32,
                                         xmm6: f32, xmm7: f32) -> i64 {
    // Integer registers must carry 1 through 6, in order.
    assert_eq!((rdi, rsi, rdx, rcx, r8, r9), (1, 2, 3, 4, 5, 6));
    // SSE registers must carry successive powers of two: 1.0 .. 128.0
    // (exactly representable, so equality comparison is safe).
    let sse = [xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7];
    for (i, value) in sse.iter().enumerate() {
        assert_eq!(*value, (1u32 << i) as f32);
    }
    // Sentinel return value checked by the caller.
    42
}
// this struct contains 8 i64's, while only 6 can be passed in registers.
#[cfg(target_arch = "x86_64")]
#[derive(PartialEq, Eq, Debug)]
pub struct LargeStruct(i64, i64, i64, i64, i64, i64, i64, i64);

// Multiplies the n-th field by (n + 1). The struct is passed and returned by
// value, and inlining is disabled, so this exercises the sysv64 by-memory
// aggregate argument/return path.
#[cfg(target_arch = "x86_64")]
#[inline(never)]
pub extern "sysv64" fn large_struct_by_val(foo: LargeStruct) -> LargeStruct {
    let LargeStruct(a, b, c, d, e, f, g, h) = foo;
    LargeStruct(a, b * 2, c * 3, d * 4, e * 5, f * 6, g * 7, h * 8)
}
#[cfg(target_arch = "x86_64")]
pub fn main() {
    let result: i64;
    unsafe {
        // Hand-load every sysv64 argument register: rdi..r9 with 1..6 and
        // xmm0..xmm7 with the IEEE-754 bit patterns of 1.0, 2.0, 4.0, ...
        // 128.0 (via eax + movd). The call goes through r10 — not an
        // argument register — so none of the loaded registers are disturbed.
        llvm_asm!("mov rdi, 1;
                   mov rsi, 2;
                   mov rdx, 3;
                   mov rcx, 4;
                   mov r8,  5;
                   mov r9,  6;
                   mov eax, 0x3F800000;
                   movd xmm0, eax;
                   mov eax, 0x40000000;
                   movd xmm1, eax;
                   mov eax, 0x40800000;
                   movd xmm2, eax;
                   mov eax, 0x41000000;
                   movd xmm3, eax;
                   mov eax, 0x41800000;
                   movd xmm4, eax;
                   mov eax, 0x42000000;
                   movd xmm5, eax;
                   mov eax, 0x42800000;
                   movd xmm6, eax;
                   mov eax, 0x43000000;
                   movd xmm7, eax;
                   call r10
                   "
            : "={rax}"(result)
            : "{r10}"(all_the_registers as usize)
            : "rdi", "rsi", "rdx", "rcx", "r8", "r9", "r11", "cc", "memory"
            : "intel", "alignstack"
            )
    }
    // `all_the_registers` returns 42 (in rax) only if every register held
    // the expected value.
    assert_eq!(result, 42);
    // Also exercise passing and returning a struct too large for registers.
    assert_eq!(
        large_struct_by_val(LargeStruct(1, 2, 3, 4, 5, 6, 7, 8)),
        LargeStruct(1, 4, 9, 16, 25, 36, 49, 64)
    );
}
// No-op entry point so the test still compiles on targets where the sysv64
// checks above are compiled out.
#[cfg(not(target_arch = "x86_64"))]
pub fn main() {}
| 29.103093 | 81 | 0.462983 |
dbb7e717f720f421f0b9fdd50e563e57650496f8 | 11,686 | //! SMIME implementation using CMS
//!
//! CMS (PKCS#7) is an encyption standard. It allows signing and encrypting data using
//! X.509 certificates. The OpenSSL implementation of CMS is used in email encryption
//! generated from a `Vec` of bytes. This `Vec` follows the smime protocol standards.
//! Data accepted by this module will be smime type `enveloped-data`.
use bitflags::bitflags;
use foreign_types::{ForeignType, ForeignTypeRef};
use libc::c_uint;
use std::ptr;
use crate::bio::{MemBio, MemBioSlice};
use crate::error::ErrorStack;
use crate::pkey::{HasPrivate, PKeyRef};
use crate::stack::StackRef;
use crate::symm::Cipher;
use crate::x509::{X509Ref, X509};
use crate::{cvt, cvt_p};
bitflags! {
    // Bit-flag wrapper over OpenSSL's CMS_* option constants; combinations
    // are passed through unchanged to the CMS_sign/CMS_encrypt/CMS_decrypt
    // FFI calls below.
    pub struct CMSOptions : c_uint {
        const TEXT = ffi::CMS_TEXT;
        const CMS_NOCERTS = ffi::CMS_NOCERTS;
        const NO_CONTENT_VERIFY = ffi::CMS_NO_CONTENT_VERIFY;
        const NO_ATTR_VERIFY = ffi::CMS_NO_ATTR_VERIFY;
        const NOSIGS = ffi::CMS_NOSIGS;
        const NOINTERN = ffi::CMS_NOINTERN;
        const NO_SIGNER_CERT_VERIFY = ffi::CMS_NO_SIGNER_CERT_VERIFY;
        const NOVERIFY = ffi::CMS_NOVERIFY;
        const DETACHED = ffi::CMS_DETACHED;
        const BINARY = ffi::CMS_BINARY;
        const NOATTR = ffi::CMS_NOATTR;
        const NOSMIMECAP = ffi::CMS_NOSMIMECAP;
        const NOOLDMIMETYPE = ffi::CMS_NOOLDMIMETYPE;
        const CRLFEOL = ffi::CMS_CRLFEOL;
        const STREAM = ffi::CMS_STREAM;
        const NOCRL = ffi::CMS_NOCRL;
        const PARTIAL = ffi::CMS_PARTIAL;
        const REUSE_DIGEST = ffi::CMS_REUSE_DIGEST;
        const USE_KEYID = ffi::CMS_USE_KEYID;
        const DEBUG_DECRYPT = ffi::CMS_DEBUG_DECRYPT;
        // These constants only exist in newer OpenSSL releases, hence the
        // version gates.
        #[cfg(all(not(libressl), not(ossl101)))]
        const KEY_PARAM = ffi::CMS_KEY_PARAM;
        #[cfg(all(not(libressl), not(ossl101), not(ossl102)))]
        const ASCIICRLF = ffi::CMS_ASCIICRLF;
    }
}
// Generates the owned `CmsContentInfo` / borrowed `CmsContentInfoRef` pair
// around the raw `ffi::CMS_ContentInfo` pointer, freeing it on drop.
foreign_type_and_impl_send_sync! {
    type CType = ffi::CMS_ContentInfo;
    fn drop = ffi::CMS_ContentInfo_free;

    /// High level CMS wrapper
    ///
    /// CMS supports nesting various types of data, including signatures, certificates,
    /// encrypted data, smime messages (encrypted email), and data digest. The ContentInfo
    /// content type is the encapsulation of all those content types. [`RFC 5652`] describes
    /// CMS and OpenSSL follows this RFC's implementation.
    ///
    /// [`RFC 5652`]: https://tools.ietf.org/html/rfc5652#page-6
    pub struct CmsContentInfo;
    /// Reference to [`CMSContentInfo`]
    ///
    /// [`CMSContentInfo`]:struct.CmsContentInfo.html
    pub struct CmsContentInfoRef;
}
impl CmsContentInfoRef {
    /// Given the sender's private key, `pkey` and the recipient's certificate, `cert`,
    /// decrypt the data in `self`.
    ///
    /// OpenSSL documentation at [`CMS_decrypt`]
    ///
    /// [`CMS_decrypt`]: https://www.openssl.org/docs/man1.1.0/crypto/CMS_decrypt.html
    pub fn decrypt<T>(&self, pkey: &PKeyRef<T>, cert: &X509) -> Result<Vec<u8>, ErrorStack>
    where
        T: HasPrivate,
    {
        unsafe {
            let pkey = pkey.as_ptr();
            let cert = cert.as_ptr();
            let out = MemBio::new()?;
            // SAFETY: every pointer handed to CMS_decrypt is derived from a
            // reference (or the `out` BIO) that stays alive for the whole
            // call; `cvt` turns OpenSSL's status code into a Rust error.
            cvt(ffi::CMS_decrypt(
                self.as_ptr(),
                pkey,
                cert,
                ptr::null_mut(),
                out.as_ptr(),
                0,
            ))?;
            // Copy the plaintext out of the BIO before it is dropped.
            Ok(out.get_buf().to_owned())
        }
    }

    /// Given the sender's private key, `pkey`,
    /// decrypt the data in `self` without validating the recipient certificate.
    ///
    /// *Warning*: Not checking the recipient certificate may leave you vulnerable to Bleichenbacher's attack on PKCS#1 v1.5 RSA padding.
    /// See [`CMS_decrypt`] for more information.
    ///
    /// [`CMS_decrypt`]: https://www.openssl.org/docs/man1.1.0/crypto/CMS_decrypt.html
    // FIXME merge into decrypt
    pub fn decrypt_without_cert_check<T>(&self, pkey: &PKeyRef<T>) -> Result<Vec<u8>, ErrorStack>
    where
        T: HasPrivate,
    {
        unsafe {
            let pkey = pkey.as_ptr();
            let out = MemBio::new()?;
            // SAFETY: as in `decrypt`; the null `cert` argument tells OpenSSL
            // to skip the recipient-certificate check (see warning above).
            cvt(ffi::CMS_decrypt(
                self.as_ptr(),
                pkey,
                ptr::null_mut(),
                ptr::null_mut(),
                out.as_ptr(),
                0,
            ))?;
            Ok(out.get_buf().to_owned())
        }
    }

    to_der! {
        /// Serializes this CmsContentInfo using DER.
        ///
        /// OpenSSL documentation at [`i2d_CMS_ContentInfo`]
        ///
        /// [`i2d_CMS_ContentInfo`]: https://www.openssl.org/docs/man1.0.2/crypto/i2d_CMS_ContentInfo.html
        to_der,
        ffi::i2d_CMS_ContentInfo
    }

    to_pem! {
        /// Serializes this CmsContentInfo using PEM.
        ///
        /// OpenSSL documentation at [`PEM_write_bio_CMS`]
        ///
        /// [`PEM_write_bio_CMS`]: https://www.openssl.org/docs/man1.1.0/man3/PEM_write_bio_CMS.html
        to_pem,
        ffi::PEM_write_bio_CMS
    }
}
impl CmsContentInfo {
    /// Parses a smime formatted `vec` of bytes into a `CmsContentInfo`.
    ///
    /// OpenSSL documentation at [`SMIME_read_CMS`]
    ///
    /// [`SMIME_read_CMS`]: https://www.openssl.org/docs/man1.0.2/crypto/SMIME_read_CMS.html
    pub fn smime_read_cms(smime: &[u8]) -> Result<CmsContentInfo, ErrorStack> {
        unsafe {
            let bio = MemBioSlice::new(smime)?;
            // SAFETY: `bio` outlives the call; on success OpenSSL returns a
            // freshly allocated CMS_ContentInfo whose ownership we take.
            let cms = cvt_p(ffi::SMIME_read_CMS(bio.as_ptr(), ptr::null_mut()))?;
            Ok(CmsContentInfo::from_ptr(cms))
        }
    }

    from_der! {
        /// Deserializes a DER-encoded ContentInfo structure.
        ///
        /// This corresponds to [`d2i_CMS_ContentInfo`].
        ///
        /// [`d2i_CMS_ContentInfo`]: https://www.openssl.org/docs/manmaster/man3/d2i_X509.html
        from_der,
        CmsContentInfo,
        ffi::d2i_CMS_ContentInfo
    }

    from_pem! {
        /// Deserializes a PEM-encoded ContentInfo structure.
        ///
        /// This corresponds to [`PEM_read_bio_CMS`].
        ///
        /// [`PEM_read_bio_CMS`]: https://www.openssl.org/docs/man1.1.0/man3/PEM_read_bio_CMS.html
        from_pem,
        CmsContentInfo,
        ffi::PEM_read_bio_CMS
    }

    /// Given a signing cert `signcert`, private key `pkey`, a certificate stack `certs`,
    /// data `data` and flags `flags`, create a CmsContentInfo struct.
    ///
    /// All arguments are optional.
    ///
    /// OpenSSL documentation at [`CMS_sign`]
    ///
    /// [`CMS_sign`]: https://www.openssl.org/docs/manmaster/man3/CMS_sign.html
    pub fn sign<T>(
        signcert: Option<&X509Ref>,
        pkey: Option<&PKeyRef<T>>,
        certs: Option<&StackRef<X509>>,
        data: Option<&[u8]>,
        flags: CMSOptions,
    ) -> Result<CmsContentInfo, ErrorStack>
    where
        T: HasPrivate,
    {
        unsafe {
            // Missing optional arguments are represented as null pointers,
            // as the OpenSSL API expects.
            let signcert = signcert.map_or(ptr::null_mut(), |p| p.as_ptr());
            let pkey = pkey.map_or(ptr::null_mut(), |p| p.as_ptr());
            // Bind the BIO itself (not just its raw pointer) so the backing
            // buffer stays alive until CMS_sign returns.
            let data_bio = match data {
                Some(data) => Some(MemBioSlice::new(data)?),
                None => None,
            };
            let data_bio_ptr = data_bio.as_ref().map_or(ptr::null_mut(), |p| p.as_ptr());
            let certs = certs.map_or(ptr::null_mut(), |p| p.as_ptr());
            // SAFETY: all non-null pointers are derived from live references
            // or from `data_bio`, which is still in scope here.
            let cms = cvt_p(ffi::CMS_sign(
                signcert,
                pkey,
                certs,
                data_bio_ptr,
                flags.bits(),
            ))?;
            Ok(CmsContentInfo::from_ptr(cms))
        }
    }

    /// Given a certificate stack `certs`, data `data`, cipher `cipher` and flags `flags`,
    /// create a CmsContentInfo struct.
    ///
    /// OpenSSL documentation at [`CMS_encrypt`]
    ///
    /// [`CMS_encrypt`]: https://www.openssl.org/docs/manmaster/man3/CMS_encrypt.html
    pub fn encrypt(
        certs: &StackRef<X509>,
        data: &[u8],
        cipher: Cipher,
        flags: CMSOptions,
    ) -> Result<CmsContentInfo, ErrorStack> {
        unsafe {
            let data_bio = MemBioSlice::new(data)?;
            // SAFETY: `certs` and `data_bio` outlive the call; ownership of
            // the returned CMS_ContentInfo is transferred to us.
            let cms = cvt_p(ffi::CMS_encrypt(
                certs.as_ptr(),
                data_bio.as_ptr(),
                cipher.as_ptr(),
                flags.bits(),
            ))?;
            Ok(CmsContentInfo::from_ptr(cms))
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::pkcs12::Pkcs12;
    use crate::stack::Stack;
    use crate::x509::X509;

    /// End-to-end round trip: encrypt with the recipient's public cert, then
    /// decrypt with the matching private key via both the DER and PEM
    /// serializations, with and without the recipient-cert check.
    #[test]
    fn cms_encrypt_decrypt() {
        // load cert with public key only
        let pub_cert_bytes = include_bytes!("../test/cms_pubkey.der");
        let pub_cert = X509::from_der(pub_cert_bytes).expect("failed to load pub cert");

        // load cert with private key
        let priv_cert_bytes = include_bytes!("../test/cms.p12");
        let priv_cert = Pkcs12::from_der(priv_cert_bytes).expect("failed to load priv cert");
        let priv_cert = priv_cert
            .parse("mypass")
            .expect("failed to parse priv cert");

        // encrypt cms message using public key cert
        let input = String::from("My Message");
        let mut cert_stack = Stack::new().expect("failed to create stack");
        cert_stack
            .push(pub_cert)
            .expect("failed to add pub cert to stack");
        // `encrypt` takes `&[u8]`; the original passed `&input.as_bytes()`
        // (an `&&[u8]`) and relied on deref coercion — fixed here.
        let encrypt = CmsContentInfo::encrypt(
            &cert_stack,
            input.as_bytes(),
            Cipher::des_ede3_cbc(),
            CMSOptions::empty(),
        )
        .expect("failed create encrypted cms");

        // decrypt cms message using private key cert (DER)
        {
            let encrypted_der = encrypt.to_der().expect("failed to create der from cms");
            let decrypt =
                CmsContentInfo::from_der(&encrypted_der).expect("failed read cms from der");

            let decrypt_with_cert_check = decrypt
                .decrypt(&priv_cert.pkey, &priv_cert.cert)
                .expect("failed to decrypt cms");
            let decrypt_with_cert_check = String::from_utf8(decrypt_with_cert_check)
                .expect("failed to create string from cms content");

            let decrypt_without_cert_check = decrypt
                .decrypt_without_cert_check(&priv_cert.pkey)
                .expect("failed to decrypt cms");
            let decrypt_without_cert_check = String::from_utf8(decrypt_without_cert_check)
                .expect("failed to create string from cms content");

            assert_eq!(input, decrypt_with_cert_check);
            assert_eq!(input, decrypt_without_cert_check);
        }

        // decrypt cms message using private key cert (PEM)
        {
            let encrypted_pem = encrypt.to_pem().expect("failed to create pem from cms");
            let decrypt =
                CmsContentInfo::from_pem(&encrypted_pem).expect("failed read cms from pem");

            let decrypt_with_cert_check = decrypt
                .decrypt(&priv_cert.pkey, &priv_cert.cert)
                .expect("failed to decrypt cms");
            let decrypt_with_cert_check = String::from_utf8(decrypt_with_cert_check)
                .expect("failed to create string from cms content");

            let decrypt_without_cert_check = decrypt
                .decrypt_without_cert_check(&priv_cert.pkey)
                .expect("failed to decrypt cms");
            let decrypt_without_cert_check = String::from_utf8(decrypt_without_cert_check)
                .expect("failed to create string from cms content");

            assert_eq!(input, decrypt_with_cert_check);
            assert_eq!(input, decrypt_without_cert_check);
        }
    }
}
| 35.093093 | 137 | 0.589081 |
e22688b4cbd235907148f6bf87581ff8610a14c6 | 32 | pub use header::*;
// Private module; its items are re-exported wholesale by the `pub use` above.
mod header;
| 8 | 18 | 0.65625 |
1c2ebf169a6c9ca280309adcf5a1c4fabcb5949c | 3,854 | use crate::generated::common::*;
use crate::generated::service::*;
use common::core::BaseMessage;
use common::traits::{
DispatcherId, EmptyConfig, OriginAttributes, Service, SessionSupport, Shared,
SharedServiceState, SharedSessionContext, StateLogger, TrackerId,
};
use log::{debug, info};
/// State shared by all `AudioVolume` sessions in this process.
pub struct SharedObj {
    // Most recent volume action requested by any client (starts as `None`).
    volume_state: AudioVolumeState,
    // Fans `audio_volume_changed` events out to every registered session.
    event_broadcaster: AudioVolumeEventBroadcaster,
}
impl From<&EmptyConfig> for SharedObj {
    // Builds the initial shared state: no volume action recorded yet and a
    // broadcaster with no registered listeners. The config carries no data.
    fn from(_config: &EmptyConfig) -> Self {
        Self {
            volume_state: AudioVolumeState::None,
            event_broadcaster: AudioVolumeEventBroadcaster::default(),
        }
    }
}
impl StateLogger for SharedObj {
    // Diagnostic hook: delegates state logging to the event broadcaster.
    fn log(&self) {
        self.event_broadcaster.log();
    }
}
/// Per-session service instance handling audio-volume requests.
pub struct AudioVolume {
    // Tracker id of this service within its session.
    id: TrackerId,
    // Handle to the state shared across all AudioVolume sessions.
    shared_obj: Shared<SharedObj>,
    // Id returned when this session's event dispatcher was registered;
    // used to unregister it again on Drop.
    dispatcher_id: DispatcherId,
}
impl AudioVolume {
    /// Records `state` as the current volume action and broadcasts a
    /// volume-changed event to every registered listener.
    fn set_state(&self, state: AudioVolumeState) {
        let mut shared = self.shared_obj.lock();
        shared.volume_state = state;
        debug!("broadcast AudioVolumeState {:?}", shared.volume_state);
        let current = shared.volume_state;
        shared
            .event_broadcaster
            .broadcast_audio_volume_changed(current);
    }
}
// Marker impl: `AudioVolume` relies entirely on the trait's default behavior.
impl AudioVolumeManager for AudioVolume {}
impl AudioVolumeMethods for AudioVolume {
    // Each request handler records the corresponding volume action in shared
    // state (which broadcasts an event to listeners) and then resolves the
    // client's responder.
    fn request_volume_up(&mut self, responder: AudioVolumeRequestVolumeUpResponder) {
        self.set_state(AudioVolumeState::VolumeUp);
        responder.resolve();
    }

    fn request_volume_down(&mut self, responder: AudioVolumeRequestVolumeDownResponder) {
        self.set_state(AudioVolumeState::VolumeDown);
        responder.resolve();
    }

    fn request_volume_show(&mut self, responder: AudioVolumeRequestVolumeShowResponder) {
        self.set_state(AudioVolumeState::VolumeShow);
        responder.resolve();
    }
}
// Generates the `Self::shared_state()` accessor (used by `Service::create`
// below), tying `AudioVolume` to its `SharedObj` state and `EmptyConfig`.
common::impl_shared_state!(AudioVolume, SharedObj, EmptyConfig);
impl Service<AudioVolume> for AudioVolume {
    /// Creates a per-session `AudioVolume` and registers its event dispatcher
    /// with the shared broadcaster so the session receives volume events.
    fn create(
        _attrs: &OriginAttributes,
        _context: SharedSessionContext,
        helper: SessionSupport,
    ) -> Result<AudioVolume, String> {
        info!("AudioVolumeService::create");
        let service_id = helper.session_tracker_id().service();
        let event_dispatcher = AudioVolumeEventDispatcher::from(helper, 0);
        let shared_obj = Self::shared_state();
        // The returned id is kept so Drop can unregister the dispatcher.
        let dispatcher_id = shared_obj.lock().event_broadcaster.add(&event_dispatcher);
        info!("AudioVolume::create with dispatcher_id {}", dispatcher_id);
        let service = AudioVolume {
            id: service_id,
            shared_obj,
            dispatcher_id,
        };
        Ok(service)
    }

    /// Renders an incoming request as a human-readable string for logging.
    fn format_request(&mut self, _transport: &SessionSupport, message: &BaseMessage) -> String {
        info!("AudioVolumeService::format_request");
        let req: Result<AudioVolumeManagerFromClient, common::BincodeError> =
            common::deserialize_bincode(&message.content);
        match req {
            Ok(req) => format!("AudioVolumeService request: {:?}", req),
            // Message fix: previously read "Unable to AudioVolumeService
            // request", which dropped the verb.
            Err(err) => format!("Unable to format AudioVolumeService request: {:?}", err),
        }
    }

    // Processes a request coming from the Session.
    fn on_request(&mut self, transport: &SessionSupport, message: &BaseMessage) {
        info!("incoming request {:?} ", message);
        self.dispatch_request(transport, message);
    }

    /// Called when the client releases a tracked object. This service tracks
    /// nothing per-object, so releasing always succeeds.
    fn release_object(&mut self, object_id: u32) -> bool {
        info!("releasing object {}", object_id);
        true
    }
}
impl Drop for AudioVolume {
    // Unregisters this session's event dispatcher when the service goes away
    // so the broadcaster stops delivering events to a dead session.
    fn drop(&mut self) {
        debug!(
            "Dropping AudioVolume Service#{}, dispatcher_id {}",
            self.id, self.dispatcher_id
        );
        self.shared_obj
            .lock()
            .event_broadcaster
            .remove(self.dispatcher_id);
    }
}
| 31.333333 | 96 | 0.66165 |
de910dfaa418e0dd88c3ac6b480937c06685732f | 10,547 | use crate::azure::core::errors::{check_status_extract_headers_and_body, AzureError};
use crate::azure::core::lease::LeaseId;
use crate::azure::core::{
BlobNameRequired, BlobNameSupport, ClientRequestIdOption, ClientRequestIdSupport, ClientRequired, ContainerNameRequired,
ContainerNameSupport, LeaseIdOption, LeaseIdSupport, SnapshotRequired, SnapshotSupport, TimeoutOption, TimeoutSupport,
};
use crate::azure::core::{No, ToAssign, Yes};
use crate::azure::storage::blob::generate_blob_uri;
use crate::azure::storage::blob::responses::DeleteBlobResponse;
use crate::azure::storage::client::Client;
use chrono::{DateTime, Utc};
use futures::future::{done, Future};
use hyper::{Method, StatusCode};
use std::marker::PhantomData;
/// Type-state builder for the "delete blob snapshot" operation.
///
/// The three marker parameters record (at compile time, via `Yes`/`No`)
/// whether the container name, blob name, and snapshot timestamp have been
/// supplied; `finalize` is only available once all three are `Yes`.
#[derive(Debug, Clone)]
pub struct DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    client: &'a Client,
    // Zero-sized markers carrying the type-state parameters.
    p_container_name: PhantomData<ContainerNameSet>,
    p_blob_name: PhantomData<BlobNameSet>,
    p_snapshot: PhantomData<SnapshotSet>,
    // Mandatory values; `None` only while the matching marker is still `No`.
    container_name: Option<&'a str>,
    blob_name: Option<&'a str>,
    snapshot: Option<DateTime<Utc>>,
    // Optional request parameters.
    timeout: Option<u64>,
    lease_id: Option<&'a LeaseId>,
    client_request_id: Option<&'a str>,
}
impl<'a> DeleteBlobSnapshotBuilder<'a, No, No, No> {
    /// Creates a builder with no container name, blob name, or snapshot set
    /// yet; all three must be supplied before `finalize` becomes available.
    #[inline]
    pub(crate) fn new(client: &'a Client) -> DeleteBlobSnapshotBuilder<'a, No, No, No> {
        DeleteBlobSnapshotBuilder {
            client,
            // Type-state markers first, then every value starts unset.
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_snapshot: PhantomData {},
            container_name: None,
            blob_name: None,
            snapshot: None,
            timeout: None,
            lease_id: None,
            client_request_id: None,
        }
    }
}
// The client is available in every type-state.
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> ClientRequired<'a>
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    #[inline]
    fn client(&self) -> &'a Client {
        self.client
    }
}
// Only implemented for `Yes` in the container slot, so the `unwrap` below
// cannot fail: the type-state guarantees the value was set.
impl<'a, BlobNameSet, SnapshotSet> ContainerNameRequired<'a> for DeleteBlobSnapshotBuilder<'a, Yes, BlobNameSet, SnapshotSet>
where
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    #[inline]
    fn container_name(&self) -> &'a str {
        self.container_name.unwrap()
    }
}
// Same pattern: `Yes` in the blob-name slot makes the `unwrap` safe.
impl<'a, ContainerNameSet, SnapshotSet> BlobNameRequired<'a> for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, Yes, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    #[inline]
    fn blob_name(&self) -> &'a str {
        self.blob_name.unwrap()
    }
}
// Same pattern: `Yes` in the snapshot slot makes the `unwrap` safe.
impl<'a, ContainerNameSet, BlobNameSet> SnapshotRequired for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, Yes>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
{
    #[inline]
    fn snapshot(&self) -> DateTime<Utc> {
        self.snapshot.unwrap()
    }
}
// Optional parameters are readable in every type-state and may be `None`.
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> TimeoutOption
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    #[inline]
    fn timeout(&self) -> Option<u64> {
        self.timeout
    }
}
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> LeaseIdOption<'a>
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    #[inline]
    fn lease_id(&self) -> Option<&'a LeaseId> {
        self.lease_id
    }
}
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> ClientRequestIdOption<'a>
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    #[inline]
    fn client_request_id(&self) -> Option<&'a str> {
        self.client_request_id
    }
}
// Each `*Support` impl consumes `self` and rebuilds the struct with one field
// set; struct-update syntax cannot be used because the generic type-state
// parameters change between input and output types.
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> ContainerNameSupport<'a>
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    // Setting the container name flips its type-state slot to `Yes`.
    type O = DeleteBlobSnapshotBuilder<'a, Yes, BlobNameSet, SnapshotSet>;
    #[inline]
    fn with_container_name(self, container_name: &'a str) -> Self::O {
        DeleteBlobSnapshotBuilder {
            client: self.client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_snapshot: PhantomData {},
            container_name: Some(container_name),
            blob_name: self.blob_name,
            snapshot: self.snapshot,
            timeout: self.timeout,
            lease_id: self.lease_id,
            client_request_id: self.client_request_id,
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> BlobNameSupport<'a>
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    // Setting the blob name flips its type-state slot to `Yes`.
    type O = DeleteBlobSnapshotBuilder<'a, ContainerNameSet, Yes, SnapshotSet>;
    #[inline]
    fn with_blob_name(self, blob_name: &'a str) -> Self::O {
        DeleteBlobSnapshotBuilder {
            client: self.client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_snapshot: PhantomData {},
            container_name: self.container_name,
            blob_name: Some(blob_name),
            snapshot: self.snapshot,
            timeout: self.timeout,
            lease_id: self.lease_id,
            client_request_id: self.client_request_id,
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> SnapshotSupport
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    // Setting the snapshot flips its type-state slot to `Yes`.
    type O = DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, Yes>;
    #[inline]
    fn with_snapshot(self, snapshot: DateTime<Utc>) -> Self::O {
        DeleteBlobSnapshotBuilder {
            client: self.client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_snapshot: PhantomData {},
            container_name: self.container_name,
            blob_name: self.blob_name,
            snapshot: Some(snapshot),
            timeout: self.timeout,
            lease_id: self.lease_id,
            client_request_id: self.client_request_id,
        }
    }
}
// The remaining setters are optional parameters: they keep the same
// type-state (Self::O has identical generic arguments).
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> TimeoutSupport
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    type O = DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>;
    #[inline]
    fn with_timeout(self, timeout: u64) -> Self::O {
        DeleteBlobSnapshotBuilder {
            client: self.client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_snapshot: PhantomData {},
            container_name: self.container_name,
            blob_name: self.blob_name,
            snapshot: self.snapshot,
            timeout: Some(timeout),
            lease_id: self.lease_id,
            client_request_id: self.client_request_id,
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> LeaseIdSupport<'a>
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    type O = DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>;
    #[inline]
    fn with_lease_id(self, lease_id: &'a LeaseId) -> Self::O {
        DeleteBlobSnapshotBuilder {
            client: self.client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_snapshot: PhantomData {},
            container_name: self.container_name,
            blob_name: self.blob_name,
            snapshot: self.snapshot,
            timeout: self.timeout,
            lease_id: Some(lease_id),
            client_request_id: self.client_request_id,
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> ClientRequestIdSupport<'a>
    for DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
    type O = DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>;
    #[inline]
    fn with_client_request_id(self, client_request_id: &'a str) -> Self::O {
        DeleteBlobSnapshotBuilder {
            client: self.client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_snapshot: PhantomData {},
            container_name: self.container_name,
            blob_name: self.blob_name,
            snapshot: self.snapshot,
            timeout: self.timeout,
            lease_id: self.lease_id,
            client_request_id: Some(client_request_id),
        }
    }
}
// methods callable regardless
// NOTE(review): intentionally empty placeholder for type-state-independent
// methods; kept so such methods can be added without touching the bounds.
impl<'a, ContainerNameSet, BlobNameSet, SnapshotSet> DeleteBlobSnapshotBuilder<'a, ContainerNameSet, BlobNameSet, SnapshotSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SnapshotSet: ToAssign,
{
}
// `finalize` is only available once container name, blob name, and snapshot
// have all been set (all three type-state slots are `Yes`).
impl<'a> DeleteBlobSnapshotBuilder<'a, Yes, Yes, Yes> {
    /// Issues the DELETE request for the selected blob snapshot and resolves
    /// to the parsed response headers.
    pub fn finalize(self) -> impl Future<Item = DeleteBlobResponse, Error = AzureError> {
        // Base URI always carries the mandatory snapshot query parameter.
        let mut uri = generate_blob_uri(&self, Some(&SnapshotRequired::to_uri_parameter(&self)));
        // Append the optional timeout parameter when it was provided.
        if let Some(nm) = TimeoutOption::to_uri_parameter(&self) {
            uri = format!("{}&{}", uri, nm);
        }
        let req = self.client().perform_request(
            &uri,
            &Method::DELETE,
            |ref mut request| {
                // Optional headers are only added when their values are set.
                LeaseIdOption::add_header(&self, request);
                ClientRequestIdOption::add_header(&self, request);
            },
            None,
        );
        // The service acknowledges a snapshot delete with 202 Accepted.
        done(req)
            .from_err()
            .and_then(move |future_response| check_status_extract_headers_and_body(future_response, StatusCode::ACCEPTED))
            .and_then(|(headers, _body)| done(DeleteBlobResponse::from_headers(&headers)))
    }
}
| 32.253823 | 130 | 0.662179 |
ff8e1225f442c6205c543be51ddfffa4576e3713 | 981 | mod archive;
mod bindgen;
mod build;
use crate::{
archive::{ MbedTlsTarball, Untar },
bindgen::{ IncludeDir, Headers, Bindgen },
build::{ BuildDir, ConfigH, MbedTls }
};
/// The main function :P
fn main() {
// Extract archive
let build_dir = BuildDir::from_env().unwrap_or_default();
let archive = MbedTlsTarball::from_env().unwrap_or_default();
Untar::from_env().unwrap_or_default().extract(archive.path(), build_dir.path(), 1);
// Build library
let config_h = ConfigH::from_env().unwrap_or_default();
let artifacts = MbedTls::new(config_h, build_dir).build();
// Link library
let lib_dir = artifacts.join("lib");
println!("cargo:rustc-link-search=native={}", lib_dir.display());
println!("cargo:rustc-link-lib=static=mbedcrypto");
// Generate bindings
let include_dir = IncludeDir::new(artifacts);
let headers = Headers::from_env().unwrap_or_default();
Bindgen::new(include_dir, headers).generate();
} | 30.65625 | 87 | 0.67788 |
563df92f6d7fb7873a725348ecc8f812ec9f87c1 | 637 | pub mod avg;
// One submodule per math subcommand.
pub mod command;
pub mod eval;
pub mod max;
pub mod median;
pub mod min;
pub mod mode;
pub mod product;
pub mod stddev;
pub mod sum;
pub mod variance;
// Shared internals used by the subcommands above.
mod reducers;
mod utils;
// Re-export each subcommand under a `Math*` name for registration.
pub use avg::SubCommand as MathAverage;
pub use command::Command as Math;
pub use eval::SubCommand as MathEval;
pub use max::SubCommand as MathMaximum;
pub use median::SubCommand as MathMedian;
pub use min::SubCommand as MathMinimum;
pub use mode::SubCommand as MathMode;
pub use product::SubCommand as MathProduct;
pub use stddev::SubCommand as MathStddev;
pub use sum::SubCommand as MathSummation;
pub use variance::SubCommand as MathVariance;
| 23.592593 | 45 | 0.78022 |
5d071bcd8a346fab5490223657b8a5c3578910cf | 1,425 | use fyrox_sound::engine::SoundEngine;
use fyrox_sound::{
buffer::{DataSource, SoundBufferResource},
context::SoundContext,
source::{SoundSourceBuilder, Status},
};
use std::{thread, time::Duration};
/// Example: synthesize one second of a 440 Hz sine wave and loop it for
/// ten seconds through the default output device.
fn main() {
    // Initialize sound engine with default output device.
    let engine = SoundEngine::new();
    // Initialize new sound context.
    let context = SoundContext::new();
    engine.lock().unwrap().add_context(context.clone());
    // Create sine wave.
    let sample_rate = 44100;
    let sine_wave = DataSource::Raw {
        sample_rate,
        channel_count: 1,
        samples: {
            let frequency = 440.0;
            let amplitude = 0.75;
            // Generate exactly `sample_rate` samples (one second of audio);
            // previously the count was a duplicated `44100` literal that
            // could silently drift from `sample_rate`.
            (0..sample_rate)
                .map(|i| {
                    amplitude
                        * ((2.0 * std::f32::consts::PI * i as f32 * frequency) / sample_rate as f32)
                            .sin()
                })
                .collect()
        },
    };
    let sine_wave_buffer = SoundBufferResource::new_generic(sine_wave).unwrap();
    // Create generic source (without spatial effects) using that buffer.
    let source = SoundSourceBuilder::new()
        .with_buffer(sine_wave_buffer)
        .with_status(Status::Playing)
        .with_looping(true)
        .build()
        .unwrap();
    context.state().add_source(source);
    // Play sound for some time.
    thread::sleep(Duration::from_secs(10));
}
| 27.941176 | 100 | 0.573333 |
11bdffd95397ded04e33a1c795021624e9466ddf | 2,301 | use serde::{Deserialize, Serialize};
use type_uuid::*;
use atelier_loader::handle::Handle;
use bevy::{
app::{AppExit, ScheduleRunnerSettings},
prelude::*,
reflect::ReflectPlugin,
utils::Duration,
};
use bevy_atelier::AssetPlugin;
use bevy_atelier::{image::Image, AddAsset, AssetServer, AssetServerSettings, Assets};
/// Example app wiring: registers the asset plugin, two asset types, and the
/// load/use systems, then runs a fixed-rate headless loop.
fn main() {
    let mut app = App::build();
    app
        // Try creating a packfile using atelier-cli and uncommenting this line
        // .add_resource(AssetServerSettings::default_packfile())
        .add_plugins(MinimalPlugins)
        // Run the schedule at a fixed ~60 Hz instead of as fast as possible.
        .add_resource(ScheduleRunnerSettings::run_loop(Duration::from_secs_f64(
            1.0 / 60.0,
        )))
        .add_resource(bevy::log::LogSettings {
            level: bevy::log::Level::INFO,
            ..Default::default()
        })
        .add_plugin(AssetPlugin)
        .add_asset::<bevy_atelier::image::Image>()
        .add_asset::<MyCustomAsset>()
        .add_startup_system(load_the_thing.system())
        .add_system(use_the_thing.system());
    if cfg!(feature = "atelier-daemon-headless") {
        // NOTE(review): busy-spins forever in headless mode so the asset
        // daemon keeps serving — presumably intentional; confirm.
        loop {}
    } else {
        app.run();
    }
}
/// Minimal serializable asset type used to exercise `add_asset`.
#[derive(TypeUuid, Serialize, Deserialize)]
#[uuid = "43b8d830-3da6-4cc2-999d-4d62fad1a1bb"]
struct MyCustomAsset(u32);
/// Resource keeping the image handle alive between systems.
struct ThingHandle(Handle<Image>);
/// Resource keeping the custom-asset handle alive between systems.
struct MyCustomHandle(Handle<MyCustomAsset>);
/// Startup system: requests the image asset and registers both handles as
/// resources so `use_the_thing` can poll for them.
fn load_the_thing(commands: &mut Commands, asset_server: Res<AssetServer>, mut custom_assets: ResMut<Assets<MyCustomAsset>>) {
    // NOTE(review): this sleep looks like a workaround for the asset daemon
    // not being ready at startup — confirm whether it is still needed.
    std::thread::sleep(std::time::Duration::from_millis(100));
    let handle: Handle<Image> = asset_server.load("bevy_logo.png");
    println!("{:?}", handle);
    commands.insert_resource(ThingHandle(handle));
    // The custom asset is created directly rather than loaded from disk.
    let handle = custom_assets.add(MyCustomAsset(10));
    commands.insert_resource(MyCustomHandle(handle));
}
/// Polling system: once the image asset has loaded, report it; once the
/// custom asset is also present, report it and request app shutdown.
fn use_the_thing(
    thing_handle: Res<ThingHandle>,
    custom_handle: Res<MyCustomHandle>,
    custom_assets: Res<Assets<MyCustomAsset>>,
    images: Res<Assets<Image>>,
    mut app_exit: ResMut<Events<AppExit>>,
) {
    // `image` is only checked for presence; its contents are unused.
    if let Some(image) = images.get(&thing_handle.0) {
        println!("Image found!");
        if let Some(custom_asset) = custom_assets.get(&custom_handle.0) {
            println!("custom asset found: {}", custom_asset.0);
            app_exit.send(AppExit);
        }
    }
}
| 33.347826 | 126 | 0.661886 |
236e328c3ddf60a874d719fd61a641eea7b2c596 | 5,501 | #[doc = "Register `EVENTS_SEQEND[%s]` reader"]
// NOTE(review): svd2rust-generated register accessors for EVENTS_SEQEND[n];
// prefer regenerating from the SVD over hand-editing this file.
pub struct R(crate::R<EVENTS_SEQEND_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<EVENTS_SEQEND_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<EVENTS_SEQEND_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<EVENTS_SEQEND_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `EVENTS_SEQEND[%s]` writer"]
pub struct W(crate::W<EVENTS_SEQEND_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<EVENTS_SEQEND_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl From<crate::W<EVENTS_SEQEND_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<EVENTS_SEQEND_SPEC>) -> Self {
        W(writer)
    }
}
#[doc = "Emitted at end of every sequence n, when last value from RAM has been applied to wave counter\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EVENTS_SEQEND_A {
    #[doc = "0: Event not generated"]
    NOTGENERATED = 0,
    #[doc = "1: Event generated"]
    GENERATED = 1,
}
impl From<EVENTS_SEQEND_A> for bool {
    #[inline(always)]
    fn from(variant: EVENTS_SEQEND_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EVENTS_SEQEND` reader - Emitted at end of every sequence n, when last value from RAM has been applied to wave counter"]
pub struct EVENTS_SEQEND_R(crate::FieldReader<bool, EVENTS_SEQEND_A>);
impl EVENTS_SEQEND_R {
    pub(crate) fn new(bits: bool) -> Self {
        EVENTS_SEQEND_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EVENTS_SEQEND_A {
        match self.bits {
            false => EVENTS_SEQEND_A::NOTGENERATED,
            true => EVENTS_SEQEND_A::GENERATED,
        }
    }
    #[doc = "Checks if the value of the field is `NOTGENERATED`"]
    #[inline(always)]
    pub fn is_not_generated(&self) -> bool {
        **self == EVENTS_SEQEND_A::NOTGENERATED
    }
    #[doc = "Checks if the value of the field is `GENERATED`"]
    #[inline(always)]
    pub fn is_generated(&self) -> bool {
        **self == EVENTS_SEQEND_A::GENERATED
    }
}
impl core::ops::Deref for EVENTS_SEQEND_R {
    type Target = crate::FieldReader<bool, EVENTS_SEQEND_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `EVENTS_SEQEND` writer - Emitted at end of every sequence n, when last value from RAM has been applied to wave counter"]
pub struct EVENTS_SEQEND_W<'a> {
    w: &'a mut W,
}
impl<'a> EVENTS_SEQEND_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: EVENTS_SEQEND_A) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "Event not generated"]
    #[inline(always)]
    pub fn not_generated(self) -> &'a mut W {
        self.variant(EVENTS_SEQEND_A::NOTGENERATED)
    }
    #[doc = "Event generated"]
    #[inline(always)]
    pub fn generated(self) -> &'a mut W {
        self.variant(EVENTS_SEQEND_A::GENERATED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 0 only.
        self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - Emitted at end of every sequence n, when last value from RAM has been applied to wave counter"]
    #[inline(always)]
    pub fn events_seqend(&self) -> EVENTS_SEQEND_R {
        EVENTS_SEQEND_R::new((self.bits & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - Emitted at end of every sequence n, when last value from RAM has been applied to wave counter"]
    #[inline(always)]
    pub fn events_seqend(&mut self) -> EVENTS_SEQEND_W {
        EVENTS_SEQEND_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "Description collection: Emitted at end of every sequence n, when last value from RAM has been applied to wave counter\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [events_seqend](index.html) module"]
pub struct EVENTS_SEQEND_SPEC;
impl crate::RegisterSpec for EVENTS_SEQEND_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [events_seqend::R](R) reader structure"]
impl crate::Readable for EVENTS_SEQEND_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [events_seqend::W](W) writer structure"]
impl crate::Writable for EVENTS_SEQEND_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets EVENTS_SEQEND[%s]
to value 0"]
impl crate::Resettable for EVENTS_SEQEND_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 34.167702 | 511 | 0.627704 |
7a83ef50e3cc1ea6e6dececb3b9e0c7fb0f71d96 | 3,388 | #[doc = "Reader of register HSTADDR1"]
// NOTE(review): svd2rust-generated accessors for HSTADDR1 (four 7-bit USB
// host-address fields packed at bits 0, 8, 16, 24); regenerate rather than
// hand-edit.
pub type R = crate::R<u32, super::HSTADDR1>;
#[doc = "Writer for register HSTADDR1"]
pub type W = crate::W<u32, super::HSTADDR1>;
#[doc = "Register HSTADDR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::HSTADDR1 {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
#[doc = "Reader of field `HSTADDRP0`"]
pub type HSTADDRP0_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `HSTADDRP0`"]
pub struct HSTADDRP0_W<'a> {
    w: &'a mut W,
}
impl<'a> HSTADDRP0_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x7f) | ((value as u32) & 0x7f);
        self.w
    }
}
#[doc = "Reader of field `HSTADDRP1`"]
pub type HSTADDRP1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `HSTADDRP1`"]
pub struct HSTADDRP1_W<'a> {
    w: &'a mut W,
}
impl<'a> HSTADDRP1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x7f << 8)) | (((value as u32) & 0x7f) << 8);
        self.w
    }
}
#[doc = "Reader of field `HSTADDRP2`"]
pub type HSTADDRP2_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `HSTADDRP2`"]
pub struct HSTADDRP2_W<'a> {
    w: &'a mut W,
}
impl<'a> HSTADDRP2_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x7f << 16)) | (((value as u32) & 0x7f) << 16);
        self.w
    }
}
#[doc = "Reader of field `HSTADDRP3`"]
pub type HSTADDRP3_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `HSTADDRP3`"]
pub struct HSTADDRP3_W<'a> {
    w: &'a mut W,
}
impl<'a> HSTADDRP3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x7f << 24)) | (((value as u32) & 0x7f) << 24);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:6 - USB Host Address"]
    #[inline(always)]
    pub fn hstaddrp0(&self) -> HSTADDRP0_R {
        HSTADDRP0_R::new((self.bits & 0x7f) as u8)
    }
    #[doc = "Bits 8:14 - USB Host Address"]
    #[inline(always)]
    pub fn hstaddrp1(&self) -> HSTADDRP1_R {
        HSTADDRP1_R::new(((self.bits >> 8) & 0x7f) as u8)
    }
    #[doc = "Bits 16:22 - USB Host Address"]
    #[inline(always)]
    pub fn hstaddrp2(&self) -> HSTADDRP2_R {
        HSTADDRP2_R::new(((self.bits >> 16) & 0x7f) as u8)
    }
    #[doc = "Bits 24:30 - USB Host Address"]
    #[inline(always)]
    pub fn hstaddrp3(&self) -> HSTADDRP3_R {
        HSTADDRP3_R::new(((self.bits >> 24) & 0x7f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:6 - USB Host Address"]
    #[inline(always)]
    pub fn hstaddrp0(&mut self) -> HSTADDRP0_W {
        HSTADDRP0_W { w: self }
    }
    #[doc = "Bits 8:14 - USB Host Address"]
    #[inline(always)]
    pub fn hstaddrp1(&mut self) -> HSTADDRP1_W {
        HSTADDRP1_W { w: self }
    }
    #[doc = "Bits 16:22 - USB Host Address"]
    #[inline(always)]
    pub fn hstaddrp2(&mut self) -> HSTADDRP2_W {
        HSTADDRP2_W { w: self }
    }
    #[doc = "Bits 24:30 - USB Host Address"]
    #[inline(always)]
    pub fn hstaddrp3(&mut self) -> HSTADDRP3_W {
        HSTADDRP3_W { w: self }
    }
}
| 30.25 | 86 | 0.564345 |
de7703bf37529a469af34c2e31bbfdc7e533bb3e | 7,445 | use instruction_def::*;
use test::run_test;
use Operand::*;
use Reg::*;
use RegScale::*;
use RegType::*;
use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
// Golden-byte encoder tests for VPCMPUB (EVEX compare of unsigned bytes into
// a mask register). Tests come in pairs: register-register and memory forms,
// for XMM/YMM/ZMM widths, in 32-bit (Dword) and 64-bit (Qword) modes. The
// expected byte arrays are the authoritative encodings — do not edit by hand.
// k4 {k7} <- cmp(xmm3, xmm4, imm 93), 32-bit mode.
#[test]
fn vpcmpub_1() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K4)),
            operand2: Some(Direct(XMM3)),
            operand3: Some(Direct(XMM4)),
            operand4: Some(Literal8(93)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K7),
            broadcast: None,
        },
        &[98, 243, 101, 15, 62, 228, 93],
        OperandSize::Dword,
    )
}
// k7 {k2} <- cmp(xmm7, [eax]), 32-bit mode.
#[test]
fn vpcmpub_2() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K7)),
            operand2: Some(Direct(XMM7)),
            operand3: Some(Indirect(EAX, Some(OperandSize::Xmmword), None)),
            operand4: Some(Literal8(18)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K2),
            broadcast: None,
        },
        &[98, 243, 69, 10, 62, 56, 18],
        OperandSize::Dword,
    )
}
// k7 {k1} <- cmp(xmm10, xmm7), 64-bit mode (extended register).
#[test]
fn vpcmpub_3() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K7)),
            operand2: Some(Direct(XMM10)),
            operand3: Some(Direct(XMM7)),
            operand4: Some(Literal8(79)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K1),
            broadcast: None,
        },
        &[98, 243, 45, 9, 62, 255, 79],
        OperandSize::Qword,
    )
}
// k4 {k3} <- cmp(xmm9, [rax]), 64-bit mode.
#[test]
fn vpcmpub_4() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K4)),
            operand2: Some(Direct(XMM9)),
            operand3: Some(Indirect(RAX, Some(OperandSize::Xmmword), None)),
            operand4: Some(Literal8(18)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K3),
            broadcast: None,
        },
        &[98, 243, 53, 11, 62, 32, 18],
        OperandSize::Qword,
    )
}
// k5 {k3} <- cmp(ymm1, ymm7), 32-bit mode.
#[test]
fn vpcmpub_5() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K5)),
            operand2: Some(Direct(YMM1)),
            operand3: Some(Direct(YMM7)),
            operand4: Some(Literal8(16)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K3),
            broadcast: None,
        },
        &[98, 243, 117, 43, 62, 239, 16],
        OperandSize::Dword,
    )
}
// k2 {k1} <- cmp(ymm7, [ebx + disp32]), 32-bit mode.
#[test]
fn vpcmpub_6() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K2)),
            operand2: Some(Direct(YMM7)),
            operand3: Some(IndirectDisplaced(
                EBX,
                455237098,
                Some(OperandSize::Ymmword),
                None,
            )),
            operand4: Some(Literal8(13)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K1),
            broadcast: None,
        },
        &[98, 243, 69, 41, 62, 147, 234, 93, 34, 27, 13],
        OperandSize::Dword,
    )
}
// k4 {k3} <- cmp(ymm28, ymm19), 64-bit mode (EVEX high registers).
#[test]
fn vpcmpub_7() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K4)),
            operand2: Some(Direct(YMM28)),
            operand3: Some(Direct(YMM19)),
            operand4: Some(Literal8(74)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K3),
            broadcast: None,
        },
        &[98, 179, 29, 35, 62, 227, 74],
        OperandSize::Qword,
    )
}
// k2 {k4} <- cmp(ymm8, [rax*4 + disp32]), 64-bit mode.
#[test]
fn vpcmpub_8() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K2)),
            operand2: Some(Direct(YMM8)),
            operand3: Some(IndirectScaledDisplaced(
                RAX,
                Four,
                2012339439,
                Some(OperandSize::Ymmword),
                None,
            )),
            operand4: Some(Literal8(26)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K4),
            broadcast: None,
        },
        &[98, 243, 61, 44, 62, 20, 133, 239, 220, 241, 119, 26],
        OperandSize::Qword,
    )
}
// k5 {k6} <- cmp(zmm1, zmm1), 32-bit mode.
#[test]
fn vpcmpub_9() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K5)),
            operand2: Some(Direct(ZMM1)),
            operand3: Some(Direct(ZMM1)),
            operand4: Some(Literal8(112)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K6),
            broadcast: None,
        },
        &[98, 243, 117, 78, 62, 233, 112],
        OperandSize::Dword,
    )
}
// k6 {k5} <- cmp(zmm7, [edx*2 + disp32]), 32-bit mode.
#[test]
fn vpcmpub_10() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K6)),
            operand2: Some(Direct(ZMM7)),
            operand3: Some(IndirectScaledDisplaced(
                EDX,
                Two,
                1038204386,
                Some(OperandSize::Zmmword),
                None,
            )),
            operand4: Some(Literal8(98)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K5),
            broadcast: None,
        },
        &[98, 243, 69, 77, 62, 52, 85, 226, 189, 225, 61, 98],
        OperandSize::Dword,
    )
}
// k3 {k6} <- cmp(zmm12, zmm23), 64-bit mode.
#[test]
fn vpcmpub_11() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K3)),
            operand2: Some(Direct(ZMM12)),
            operand3: Some(Direct(ZMM23)),
            operand4: Some(Literal8(87)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K6),
            broadcast: None,
        },
        &[98, 179, 29, 78, 62, 223, 87],
        OperandSize::Qword,
    )
}
// k5 {k2} <- cmp(zmm25, [rcx + rdi*2 + disp32]), 64-bit mode.
#[test]
fn vpcmpub_12() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPCMPUB,
            operand1: Some(Direct(K5)),
            operand2: Some(Direct(ZMM25)),
            operand3: Some(IndirectScaledIndexedDisplaced(
                RCX,
                RDI,
                Two,
                902889952,
                Some(OperandSize::Zmmword),
                None,
            )),
            operand4: Some(Literal8(6)),
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K2),
            broadcast: None,
        },
        &[98, 243, 53, 66, 62, 172, 121, 224, 1, 209, 53, 6],
        OperandSize::Qword,
    )
}
480ec07b53906a4f8a500ae34816b6efbf115707 | 5,709 | use std::{convert::Infallible, str::Utf8Error, string::FromUtf8Error, sync::Arc};
use bonsaidb_core::{
permissions::PermissionDenied,
schema::{view, InvalidNameError},
AnyError,
};
use nebari::AbortError;
use crate::database::compat::UnknownVersion;
/// Errors that can occur from interacting with storage.
#[derive(thiserror::Error, Debug)]
pub enum Error {
/// An error occurred interacting with the storage layer, `nebari`.
#[error("error from storage: {0}")]
Nebari(#[from] nebari::Error),
/// An error occurred serializing the underlying database structures.
#[error("error while serializing internal structures: {0}")]
InternalSerialization(String),
/// An error occurred serializing the contents of a `Document` or results of a `View`.
#[error("error while serializing: {0}")]
Serialization(#[from] pot::Error),
/// An internal error occurred while waiting for or sending a message.
#[error("error while communicating internally")]
InternalCommunication,
/// A transaction was too large to execute.
#[error("transaction is too large")]
TransactionTooLarge,
/// An error occurred while executing a view
#[error("error from view: {0}")]
View(#[from] view::Error),
/// An error occurred in the secrets storage layer.
#[error("a vault error occurred: {0}")]
#[cfg(feature = "encryption")]
Vault(#[from] crate::vault::Error),
/// An error occurred decompressing a stored value.
#[error("a vault error occurred: {0}")]
#[cfg(feature = "compression")]
Compression(#[from] lz4_flex::block::DecompressError),
/// A collection requested to be encrypted, but encryption is disabled.
#[error("encryption is disabled, but a collection is requesting encryption")]
#[cfg(not(feature = "encryption"))]
EncryptionDisabled,
/// An core error occurred.
#[error("a core error occurred: {0}")]
Core(#[from] bonsaidb_core::Error),
/// A tokio task failed to execute.
#[error("a concurrency error ocurred: {0}")]
TaskJoin(#[from] tokio::task::JoinError),
/// A tokio task failed to execute.
#[error("an IO error occurred: {0}")]
Io(#[from] tokio::io::Error),
/// An error occurred from a job and couldn't be unwrapped due to clones.
#[error("an error from a job occurred: {0}")]
Job(Arc<Error>),
/// An error occurred from backing up or restoring.
#[error("a backup error: {0}")]
Backup(Box<dyn AnyError>),
/// An error occurred with a password hash.
#[cfg(feature = "password-hashing")]
#[error("password hash error: {0}")]
PasswordHash(String),
}
// A dropped flume channel is reported as an internal communication failure.
impl From<flume::RecvError> for Error {
    fn from(_: flume::RecvError) -> Self {
        Self::InternalCommunication
    }
}
// bincode is used for internal structures only, so its failures map to
// `InternalSerialization`.
impl From<bincode::Error> for Error {
    fn from(err: bincode::Error) -> Self {
        Self::InternalSerialization(err.to_string())
    }
}
// An unrecognized on-disk format version is also an internal-serialization
// problem.
impl<T> From<UnknownVersion<T>> for Error {
    fn from(err: UnknownVersion<T>) -> Self {
        Self::InternalSerialization(err.to_string())
    }
}
#[cfg(feature = "password-hashing")]
impl From<argon2::Error> for Error {
    fn from(err: argon2::Error) -> Self {
        Self::PasswordHash(err.to_string())
    }
}
#[cfg(feature = "password-hashing")]
impl From<argon2::password_hash::Error> for Error {
    fn from(err: argon2::password_hash::Error) -> Self {
        Self::PasswordHash(err.to_string())
    }
}
// Oneshot receive failures (sender dropped / nothing sent yet) are internal
// communication failures.
impl From<tokio::sync::oneshot::error::RecvError> for Error {
    fn from(_: tokio::sync::oneshot::error::RecvError) -> Self {
        Self::InternalCommunication
    }
}
impl From<tokio::sync::oneshot::error::TryRecvError> for Error {
    fn from(_: tokio::sync::oneshot::error::TryRecvError) -> Self {
        Self::InternalCommunication
    }
}
// Converting back to the core error type: core errors pass through unwrapped
// (including ones nested inside view errors); everything else is stringified
// into the generic `Database` variant.
impl From<Error> for bonsaidb_core::Error {
    fn from(err: Error) -> Self {
        match err {
            Error::View(view::Error::Core(core)) | Error::Core(core) => core,
            other => Self::Database(other.to_string()),
        }
    }
}
impl From<Arc<Error>> for Error {
fn from(err: Arc<Error>) -> Self {
match Arc::try_unwrap(err) {
Ok(err) => err,
Err(still_wrapped) => Error::Job(still_wrapped),
}
}
}
// UTF-8 decoding failures are surfaced via the core `InvalidUnicode` error.
impl From<FromUtf8Error> for Error {
    fn from(err: FromUtf8Error) -> Self {
        Self::Core(bonsaidb_core::Error::InvalidUnicode(err.to_string()))
    }
}
impl From<Utf8Error> for Error {
    fn from(err: Utf8Error) -> Self {
        Self::Core(bonsaidb_core::Error::InvalidUnicode(err.to_string()))
    }
}
impl From<InvalidNameError> for Error {
    fn from(err: InvalidNameError) -> Self {
        Self::Core(bonsaidb_core::Error::from(err))
    }
}
// With `Infallible` as the abort payload, the `Other` arm can never be
// constructed, so it is statically unreachable.
impl From<AbortError<Infallible>> for Error {
    fn from(err: AbortError<Infallible>) -> Self {
        match err {
            AbortError::Nebari(error) => Self::Nebari(error),
            AbortError::Other(_) => unreachable!(),
        }
    }
}
// When the abort payload is already our error type, unwrap it directly.
impl From<AbortError<Error>> for Error {
    fn from(err: AbortError<Error>) -> Self {
        match err {
            AbortError::Nebari(error) => Self::Nebari(error),
            AbortError::Other(error) => error,
        }
    }
}
impl From<PermissionDenied> for Error {
    fn from(err: PermissionDenied) -> Self {
        Self::Core(bonsaidb_core::Error::from(err))
    }
}
// Verifies that a storage-layer serialization error converts into
// `bonsaidb_core::Error::Database` and preserves the original message text.
#[test]
fn test_converting_error() {
    use serde::ser::Error as _;
    let err: bonsaidb_core::Error = Error::Serialization(pot::Error::custom("mymessage")).into();
    match err {
        bonsaidb_core::Error::Database(storage_error) => {
            assert!(storage_error.contains("mymessage"));
        }
        _ => unreachable!(),
    }
}
6913417ce2be487a24e3c0dd004a2fdb38b77466 | 2,973 | // This file is part of HydraDX.
// Copyright (C) 2020-2021 Intergalactic, Limited (GIB).
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![cfg_attr(not(feature = "std"), no_std)]
use codec::{Decode, Encode};
#[cfg(feature = "std")]
use frame_support::traits::GenesisBuild;
#[cfg(feature = "std")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use sp_core::bytes;
use sp_core::RuntimeDebug;
use sp_std::vec::Vec;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
/// Opaque block hash stored as raw bytes; (de)serialized via `sp_core::bytes`
/// (hex with `0x` prefix) when the `std` feature is enabled.
#[derive(PartialEq, Eq, Clone, PartialOrd, Ord, Default, Encode, Decode, RuntimeDebug, derive_more::From)]
#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Hash))]
pub struct BlockHash(#[cfg_attr(feature = "std", serde(with = "bytes"))] pub Vec<u8>);
/// Identification of a chain instance by its genesis hash and the hash of the
/// last block recorded for it (presumably the tip before a relaunch — confirm
/// against the runtime that populates `PreviousChain`).
#[derive(Debug, Encode, Decode, Clone, Default, PartialEq, Eq)]
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
pub struct Chain {
    /// Genesis block hash of the chain.
    pub genesis_hash: BlockHash,
    /// Hash of the last known block of the chain.
    pub last_block_hash: BlockHash,
}
// Re-export pallet items so that they can be accessed from the crate namespace.
pub use pallet::*;
#[frame_support::pallet]
pub mod pallet {
    use super::*;
    use frame_support::pallet_prelude::*;
    use frame_system::pallet_prelude::*;
    /// No extra configuration is needed; the pallet only records genesis data.
    #[pallet::config]
    pub trait Config: frame_system::Config {}
    #[pallet::pallet]
    #[pallet::generate_store(pub(super) trait Store)]
    pub struct Pallet<T>(_);
    /// Identification of the previous chain, written once at genesis.
    #[pallet::storage]
    #[pallet::getter(fn previous_chain)]
    pub type PreviousChain<T: Config> = StorageValue<_, Chain, ValueQuery>;
    #[pallet::genesis_config]
    pub struct GenesisConfig {
        /// Value stored verbatim into `PreviousChain` when the chain starts.
        pub previous_chain: Chain,
    }
    #[pallet::genesis_build]
    impl<T: Config> GenesisBuild<T> for GenesisConfig {
        fn build(&self) {
            PreviousChain::<T>::put(self.previous_chain.clone());
        }
    }
    #[cfg(feature = "std")]
    impl Default for GenesisConfig {
        fn default() -> Self {
            GenesisConfig {
                previous_chain: Chain::default(),
            }
        }
    }
    #[cfg(feature = "std")]
    impl GenesisConfig {
        /// Direct implementation of `GenesisBuild::build_storage`.
        pub fn build_storage<T: Config>(&self) -> Result<sp_runtime::Storage, String> {
            <Self as frame_support::traits::GenesisBuild<T>>::build_storage(self)
        }
        /// Direct implementation of `GenesisBuild::assimilate_storage`.
        pub fn assimilate_storage<T: Config>(&self, storage: &mut sp_runtime::Storage) -> Result<(), String> {
            <Self as frame_support::traits::GenesisBuild<T>>::assimilate_storage(self, storage)
        }
    }
    #[pallet::hooks]
    impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {}
    #[pallet::call]
    impl<T: Config> Pallet<T> {}
}
| 28.586538 | 106 | 0.702657 |
876d2184a783bc0ffca35e2b03d08b175472f10b | 2,454 | use super::{db, AppState, Note};
use crossterm::{
event::{poll, read, Event, KeyCode, KeyModifiers},
execute,
terminal::{disable_raw_mode, LeaveAlternateScreen},
};
use std::{
io::{stdout, Write},
time::Duration,
};
use crate::app_state::FocusedBlock;
/// Polls for a terminal event (500 ms timeout) and dispatches key presses for
/// the notes-list screen. Ctrl-modified keys are commands (navigate, create,
/// delete, switch focus, quit); unmodified keys edit the focused block.
pub fn handle_notes_list_events(state: &mut AppState) -> Result<(), Box<dyn std::error::Error>> {
    if poll(Duration::from_millis(500))? {
        match read()? {
            Event::Key(event) => {
                if event.modifiers == KeyModifiers::CONTROL {
                    match event.code {
                        // List navigation.
                        KeyCode::Char('j') => state.next_note(),
                        KeyCode::Char('k') => state.previous_note(),
                        // Create a fresh note.
                        KeyCode::Char('n') => db::insert_note(Note::new())?,
                        // Move focus between the title and contents blocks.
                        KeyCode::Char('t') => state.focused = FocusedBlock::TITLE,
                        KeyCode::Char('c') => state.focused = FocusedBlock::CONTENTS,
                        KeyCode::Char('d') => {
                            // Delete the currently selected note, if any.
                            if let Some(selected_note) = state.selected_note_id() {
                                let note = db::get_note(selected_note)?;
                                db::delete_note(note)?;
                            }
                        }
                        KeyCode::Char('q') => {
                            // Restore the terminal before terminating the process.
                            disable_raw_mode()?;
                            execute!(stdout(), LeaveAlternateScreen)?;
                            std::process::exit(1)
                        }
                        _ => (),
                    }
                } else {
                    match event.code {
                        KeyCode::Enter => {
                            // TODO: Implement
                        }
                        // Editing keys are routed to whichever block has focus.
                        KeyCode::Backspace => match state.focused {
                            FocusedBlock::TITLE => state.rmv_character_from_title()?,
                            FocusedBlock::CONTENTS => state.rmv_character_from_content()?,
                        },
                        KeyCode::Char(character) => match state.focused {
                            FocusedBlock::TITLE => state.add_character_to_title(character)?,
                            FocusedBlock::CONTENTS => state.add_character_to_content(character)?,
                        },
                        _ => (),
                    }
                }
            }
            // Non-key events (resize, mouse, …) are ignored here.
            _ => (),
        }
    }
    Ok(())
}
| 40.9 | 97 | 0.418908 |
214fd3cc3dcf3b4ec7c479bdacffc3f510247b79 | 2,152 | use crate::errors::*;
use crate::types::*;
use uuid::Uuid;
/// Thumbnail image of a very poor quality and low resolution
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct Minithumbnail {
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    extra: Option<String>,
    // TDLib client id this object belongs to; not part of the public API.
    #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))]
    client_id: Option<i32>,
    /// Thumbnail width, usually doesn't exceed 40
    width: i32,
    /// Thumbnail height, usually doesn't exceed 40
    height: i32,
    /// The thumbnail in JPEG format
    data: String,
}
impl RObject for Minithumbnail {
    #[doc(hidden)]
    fn extra(&self) -> Option<&str> {
        self.extra.as_ref().map(String::as_str)
    }
    #[doc(hidden)]
    fn client_id(&self) -> Option<i32> {
        self.client_id
    }
}
impl Minithumbnail {
pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
Ok(serde_json::from_str(json.as_ref())?)
}
pub fn builder() -> RTDMinithumbnailBuilder {
let mut inner = Minithumbnail::default();
inner.extra = Some(Uuid::new_v4().to_string());
RTDMinithumbnailBuilder { inner }
}
pub fn width(&self) -> i32 {
self.width
}
pub fn height(&self) -> i32 {
self.height
}
pub fn data(&self) -> &String {
&self.data
}
}
#[doc(hidden)]
// Write-only builder for `Minithumbnail`; accumulates fields and clones the
// finished value out via `build()`.
pub struct RTDMinithumbnailBuilder {
    inner: Minithumbnail,
}
impl RTDMinithumbnailBuilder {
    /// Finalizes the builder, cloning out the accumulated value.
    pub fn build(&self) -> Minithumbnail {
        self.inner.clone()
    }
    /// Sets the thumbnail width.
    pub fn width(&mut self, width: i32) -> &mut Self {
        self.inner.width = width;
        self
    }
    /// Sets the thumbnail height.
    pub fn height(&mut self, height: i32) -> &mut Self {
        self.inner.height = height;
        self
    }
    /// Sets the thumbnail payload.
    pub fn data<T: AsRef<str>>(&mut self, data: T) -> &mut Self {
        self.inner.data = String::from(data.as_ref());
        self
    }
}
// Lets APIs accept either an owned value or a builder uniformly.
impl AsRef<Minithumbnail> for Minithumbnail {
    fn as_ref(&self) -> &Minithumbnail {
        self
    }
}
// A builder can be viewed as its (possibly incomplete) inner value.
impl AsRef<Minithumbnail> for RTDMinithumbnailBuilder {
    fn as_ref(&self) -> &Minithumbnail {
        &self.inner
    }
}
| 23.139785 | 74 | 0.597119 |
b97c46c8df09886805999346d99540f073cd7fd3 | 30,582 | use futures::{channel::mpsc, prelude::*, stream::iter, SinkExt, StreamExt};
use log::debug;
use std::{
collections::VecDeque,
io::{self, ErrorKind},
pin::Pin,
sync::{atomic::Ordering, Arc},
task::{Context, Poll},
};
use tokio::io::AsyncWrite;
use tokio_util::codec::{length_delimited::LengthDelimitedCodec, Framed, FramedRead, FramedWrite};
use crate::{
buffer::{Buffer, SendResult},
builder::BeforeReceive,
channel::{mpsc as priority_mpsc, mpsc::Priority},
context::SessionContext,
protocol_handle_stream::{ServiceProtocolEvent, SessionProtocolEvent},
service::config::SessionConfig,
traits::Codec,
yamux::StreamHandle,
ProtocolId, StreamId,
};
/// Event generated/received by the protocol stream
#[derive(Debug)]
pub(crate) enum ProtocolEvent {
    /// The protocol is normally open
    Open {
        /// Protocol name
        proto_name: String,
        /// Yamux sub stream handle handshake framed
        substream: Box<Framed<StreamHandle, LengthDelimitedCodec>>,
        /// Protocol version
        version: String,
    },
    /// The protocol close
    Close {
        /// Stream id
        id: StreamId,
        /// Protocol id
        proto_id: ProtocolId,
    },
    /// Protocol data outbound and inbound
    Message {
        /// Stream id
        id: StreamId,
        /// Protocol id
        proto_id: ProtocolId,
        /// Data
        data: bytes::Bytes,
    },
    /// Protocol selection failed.
    SelectError {
        /// Name of the protocol being negotiated, if it was known.
        proto_name: Option<String>,
    },
    /// Codec error
    Error {
        /// Stream id
        id: StreamId,
        /// Protocol id
        proto_id: ProtocolId,
        /// Codec error
        error: std::io::Error,
    },
    // NOTE(review): appears to trigger a periodic timeout check in the
    // session — confirm against the session event loop.
    TimeoutCheck,
}
/// Each custom protocol in a session corresponds to a sub stream
/// Can be seen as the route of each protocol
pub(crate) struct Substream<U> {
    /// Underlying yamux sub stream, framed with the protocol codec.
    substream: Framed<StreamHandle, U>,
    /// Yamux stream id.
    id: StreamId,
    /// Protocol id this stream carries.
    proto_id: ProtocolId,
    /// Shared session context (pending data counters, closed flag, …).
    context: Arc<SessionContext>,
    /// Session-level buffer/size limits.
    config: SessionConfig,
    /// The buffer will be prioritized for send to underlying network
    high_write_buf: VecDeque<bytes::Bytes>,
    // The buffer which will send to underlying network
    write_buf: VecDeque<bytes::Bytes>,
    /// Set once the stream is closed or errored; polling then terminates.
    dead: bool,
    /// When true, keep unsent session events instead of clearing them on close.
    keep_buffer: bool,
    /// Send event to session
    event_sender: Buffer<ProtocolEvent>,
    /// Receive events from session
    event_receiver: priority_mpsc::Receiver<ProtocolEvent>,
    /// Buffered channel to the service-level protocol handle, if any.
    service_proto_sender: Option<Buffer<ServiceProtocolEvent>>,
    /// Buffered channel to the session-level protocol handle, if any.
    session_proto_sender: Option<Buffer<SessionProtocolEvent>>,
    /// Optional hook applied to each inbound frame before delivery.
    before_receive: Option<BeforeReceive>,
}
impl<U> Substream<U>
where
    U: Codec + Unpin,
{
    /// Notify the service/session protocol handles that this protocol opened.
    pub fn proto_open(&mut self, version: String) {
        if let Some(ref mut buffer) = self.service_proto_sender {
            buffer.push(ServiceProtocolEvent::Connected {
                session: self.context.clone(),
                version: version.clone(),
            })
        }
        if let Some(ref mut buffer) = self.session_proto_sender {
            buffer.push(SessionProtocolEvent::Opened { version })
        }
    }
    /// Requeue a frame at the front of its priority queue (preserves order).
    fn push_front(&mut self, priority: Priority, frame: bytes::Bytes) {
        if priority.is_high() {
            self.high_write_buf.push_front(frame);
        } else {
            self.write_buf.push_front(frame);
        }
    }
    /// Queue a frame at the back of its priority queue.
    fn push_back(&mut self, priority: Priority, frame: bytes::Bytes) {
        if priority.is_high() {
            self.high_write_buf.push_back(frame);
        } else {
            self.write_buf.push_back(frame);
        }
    }
    /// Sink `start_send` Ready -> data send to buffer
    /// Sink `start_send` NotReady -> buffer full need poll complete
    ///
    /// Returns `Ok(true)` when the sink was full and the frame was requeued.
    #[inline]
    fn send_inner(
        &mut self,
        cx: &mut Context,
        frame: bytes::Bytes,
        priority: Priority,
    ) -> Result<bool, io::Error> {
        let data_size = frame.len();
        let mut sink = Pin::new(&mut self.substream);
        match sink.as_mut().poll_ready(cx)? {
            Poll::Ready(()) => {
                sink.as_mut().start_send(frame)?;
                self.context.decr_pending_data_size(data_size);
                Ok(false)
            }
            Poll::Pending => {
                self.push_front(priority, frame);
                self.poll_complete(cx)?;
                Ok(true)
            }
        }
    }
    /// Send data to the lower `yamux` sub stream
    fn send_data(&mut self, cx: &mut Context) -> Result<(), io::Error> {
        while let Some(frame) = self.high_write_buf.pop_front() {
            if self.send_inner(cx, frame, Priority::High)? {
                return Ok(());
            }
        }
        while let Some(frame) = self.write_buf.pop_front() {
            if self.send_inner(cx, frame, Priority::Normal)? {
                return Ok(());
            }
        }
        self.poll_complete(cx)?;
        Ok(())
    }
    /// https://docs.rs/tokio/0.1.19/tokio/prelude/trait.Sink.html
    /// Must use poll complete to ensure data send to lower-level
    ///
    /// Sink `poll_complete` Ready -> no buffer remain, flush all
    /// Sink `poll_complete` NotReady -> there is more work left to do, may wake up next poll
    fn poll_complete(&mut self, cx: &mut Context) -> Result<bool, io::Error> {
        match Pin::new(&mut self.substream).poll_flush(cx) {
            Poll::Pending => Ok(true),
            Poll::Ready(res) => res.map(|_| false),
        }
    }
    /// Close protocol sub stream
    fn close_proto_stream(&mut self, cx: &mut Context) {
        self.event_receiver.close();
        if let Poll::Ready(Err(e)) = Pin::new(self.substream.get_mut()).poll_shutdown(cx) {
            log::trace!("sub stream poll shutdown err {}", e)
        }
        if !self.keep_buffer {
            self.event_sender.clear()
        }
        // Drain remaining events to the handles on detached tasks, appending
        // the final disconnect/close notifications.
        if let Some(ref mut service_proto_sender) = self.service_proto_sender {
            let (mut sender, mut events) = service_proto_sender.take();
            events.push_back(ServiceProtocolEvent::Disconnected {
                id: self.context.id,
            });
            crate::runtime::spawn(async move {
                let mut iter = iter(events).map(Ok);
                if let Err(e) = sender.send_all(&mut iter).await {
                    debug!("stream close event send to proto handle error: {:?}", e)
                }
            });
        }
        if let Some(ref mut session_proto_sender) = self.session_proto_sender {
            let (mut sender, mut events) = session_proto_sender.take();
            events.push_back(SessionProtocolEvent::Closed);
            if self.context.closed.load(Ordering::SeqCst) {
                events.push_back(SessionProtocolEvent::Disconnected);
            }
            crate::runtime::spawn(async move {
                let mut iter = iter(events).map(Ok);
                if let Err(e) = sender.send_all(&mut iter).await {
                    debug!("stream close event send to proto handle error: {:?}", e)
                }
            });
        }
        if !self.context.closed.load(Ordering::SeqCst) {
            let (mut sender, mut events) = self.event_sender.take();
            events.push_back(ProtocolEvent::Close {
                id: self.id,
                proto_id: self.proto_id,
            });
            crate::runtime::spawn(async move {
                let mut iter = iter(events).map(Ok);
                if let Err(e) = sender.send_all(&mut iter).await {
                    debug!("stream close event send to session error: {:?}", e)
                }
            });
        } else {
            self.output(cx);
        }
    }
    /// When send or receive message error, output error and close stream
    fn error_close(&mut self, cx: &mut Context, error: io::Error) {
        self.dead = true;
        // Disconnect-style errors are expected on remote close; don't report them.
        match error.kind() {
            ErrorKind::BrokenPipe
            | ErrorKind::ConnectionAborted
            | ErrorKind::ConnectionReset
            | ErrorKind::NotConnected
            | ErrorKind::UnexpectedEof => return,
            _ => (),
        }
        self.event_sender.push(ProtocolEvent::Error {
            id: self.id,
            proto_id: self.proto_id,
            error,
        });
        self.close_proto_stream(cx);
    }
    /// Handling commands send by session
    fn handle_proto_event(&mut self, cx: &mut Context, event: ProtocolEvent, priority: Priority) {
        match event {
            ProtocolEvent::Message { data, .. } => {
                self.push_back(priority, data);
                if let Err(err) = self.send_data(cx) {
                    // Whether it is a read send error or a flush error,
                    // the most essential problem is that there is a problem with the external network.
                    // Close the protocol stream directly.
                    debug!(
                        "protocol [{}] close because of extern network",
                        self.proto_id
                    );
                    self.output_event(
                        cx,
                        ProtocolEvent::Error {
                            id: self.id,
                            proto_id: self.proto_id,
                            error: err,
                        },
                    );
                    self.dead = true;
                }
            }
            ProtocolEvent::Close { .. } => {
                self.write_buf.clear();
                self.dead = true;
            }
            _ => (),
        }
    }
    /// Flush buffered inbound events to the service/session protocol handles.
    fn distribute_to_user_level(&mut self, cx: &mut Context) {
        if let Some(ref mut buffer) = self.service_proto_sender {
            match buffer.try_send(cx) {
                SendResult::Disconnect => self.dead = true,
                SendResult::Pending => debug!("service proto [{}] handle is full", self.proto_id),
                SendResult::Ok => (),
            }
        }
        if let Some(ref mut buffer) = self.session_proto_sender {
            match buffer.try_send(cx) {
                SendResult::Disconnect => self.dead = true,
                SendResult::Pending => debug!("session proto [{}] handle is full", self.proto_id),
                SendResult::Ok => (),
            }
        }
        if self.dead {
            self.output(cx);
        }
    }
    /// Send event to user
    #[inline]
    fn output_event(&mut self, cx: &mut Context, event: ProtocolEvent) {
        self.event_sender.push(event);
        self.output(cx);
    }
    #[inline]
    fn output(&mut self, cx: &mut Context) {
        if let SendResult::Disconnect = self.event_sender.try_send(cx) {
            debug!("proto send to session error: disconnect, may be kill by remote");
            self.dead = true;
        }
    }
    /// Poll one command from the session; applies backpressure when the write
    /// buffer exceeds the configured event size.
    fn recv_event(&mut self, cx: &mut Context) -> Poll<Option<()>> {
        if self.dead {
            return Poll::Ready(None);
        }
        if self.write_buf.len() > self.config.send_event_size() {
            return Poll::Pending;
        }
        match Pin::new(&mut self.event_receiver).as_mut().poll_next(cx) {
            Poll::Ready(Some((priority, event))) => {
                self.handle_proto_event(cx, event, priority);
                Poll::Ready(Some(()))
            }
            Poll::Ready(None) => {
                // Must be session close
                self.dead = true;
                if let Poll::Ready(Err(e)) = Pin::new(self.substream.get_mut()).poll_shutdown(cx) {
                    log::trace!("sub stream poll shutdown err {}", e)
                }
                Poll::Ready(None)
            }
            Poll::Pending => Poll::Pending,
        }
    }
    /// Poll one inbound frame; applies backpressure when either protocol
    /// handle buffer exceeds the configured event size.
    fn recv_frame(&mut self, cx: &mut Context) -> Poll<Option<()>> {
        if self.dead {
            return Poll::Ready(None);
        }
        if self
            .service_proto_sender
            .as_ref()
            .map(Buffer::len)
            .unwrap_or_default()
            > self.config.recv_event_size()
            || self
                .session_proto_sender
                .as_ref()
                .map(Buffer::len)
                .unwrap_or_default()
                > self.config.recv_event_size()
        {
            return Poll::Pending;
        }
        match Pin::new(&mut self.substream).as_mut().poll_next(cx) {
            Poll::Ready(Some(Ok(data))) => {
                let data = match self.before_receive {
                    Some(ref function) => match function(data) {
                        Ok(data) => data,
                        Err(err) => {
                            self.error_close(cx, err);
                            return Poll::Ready(None);
                        }
                    },
                    None => data.freeze(),
                };
                if let Some(ref mut buffer) = self.session_proto_sender {
                    buffer.push(SessionProtocolEvent::Received { data: data.clone() })
                }
                if let Some(ref mut buffer) = self.service_proto_sender {
                    buffer.push(ServiceProtocolEvent::Received {
                        id: self.context.id,
                        data,
                    })
                }
                self.distribute_to_user_level(cx);
                Poll::Ready(Some(()))
            }
            Poll::Ready(None) => {
                debug!("protocol [{}] close", self.proto_id);
                self.dead = true;
                Poll::Ready(None)
            }
            Poll::Pending => Poll::Pending,
            Poll::Ready(Some(Err(err))) => {
                debug!("sub stream codec error: {:?}", err);
                self.error_close(cx, err);
                Poll::Ready(None)
            }
        }
    }
    /// Push all pending state (handle buffers, session events, write buffers)
    /// as far toward their destinations as the current task allows.
    #[inline]
    fn flush(&mut self, cx: &mut Context) -> Result<(), io::Error> {
        self.poll_complete(cx)?;
        if !self
            .service_proto_sender
            .as_ref()
            .map(|buffer| buffer.is_empty())
            .unwrap_or(true)
            || !self
                .session_proto_sender
                .as_ref()
                .map(|buffer| buffer.is_empty())
                .unwrap_or(true)
        {
            self.distribute_to_user_level(cx);
        }
        if !self.event_sender.is_empty()
            || !self.write_buf.is_empty()
            || !self.high_write_buf.is_empty()
        {
            self.output(cx);
            // Was a needless identity `match` over this result.
            self.send_data(cx)
        } else {
            Ok(())
        }
    }
}
impl<U> Stream for Substream<U>
where
    U: Codec + Unpin,
{
    type Item = ();
    /// Drives the protocol stream one step: checks liveness, flushes pending
    /// output, then polls both the inbound frame side and the session command
    /// side. Yields `None` once the stream is dead.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        // double check here
        if self.dead || self.context.closed.load(Ordering::SeqCst) {
            debug!(
                "Substream({}) finished, self.dead || self.context.closed.load(Ordering::SeqCst), head",
                self.id
            );
            self.close_proto_stream(cx);
            return Poll::Ready(None);
        }
        if let Err(err) = self.flush(cx) {
            debug!(
                "Substream({}) finished with flush error: {:?}",
                self.id, err
            );
            self.error_close(cx, err);
            return Poll::Ready(None);
        }
        debug!(
            "Substream({}) write buf: {}, read buf: {}",
            self.id,
            self.write_buf.len(),
            self.event_sender.len()
        );
        // `&=` (not `&&`) so recv_event is polled even when recv_frame made
        // progress, keeping both wakers registered.
        let mut is_pending = self.recv_frame(cx).is_pending();
        is_pending &= self.recv_event(cx).is_pending();
        if is_pending {
            Poll::Pending
        } else {
            Poll::Ready(Some(()))
        }
    }
}
// Builder for `Substream`: collects ids, config, channels and handle buffers
// before the framed yamux sub stream is available.
pub(crate) struct SubstreamBuilder {
    /// Yamux stream id for the stream being built.
    id: StreamId,
    /// Protocol id the stream will carry.
    proto_id: ProtocolId,
    /// Whether unsent session events are kept after close.
    keep_buffer: bool,
    config: SessionConfig,
    context: Arc<SessionContext>,
    service_proto_sender: Option<Buffer<ServiceProtocolEvent>>,
    session_proto_sender: Option<Buffer<SessionProtocolEvent>>,
    before_receive: Option<BeforeReceive>,
    /// Send event to session
    event_sender: mpsc::Sender<ProtocolEvent>,
    /// Receive events from session
    event_receiver: priority_mpsc::Receiver<ProtocolEvent>,
}
impl SubstreamBuilder {
    /// Creates a builder wired to the session's event channels, with all
    /// other settings at their defaults.
    pub fn new(
        event_sender: mpsc::Sender<ProtocolEvent>,
        event_receiver: priority_mpsc::Receiver<ProtocolEvent>,
        context: Arc<SessionContext>,
    ) -> Self {
        SubstreamBuilder {
            id: 0,
            proto_id: 0.into(),
            keep_buffer: false,
            config: SessionConfig::default(),
            context,
            service_proto_sender: None,
            session_proto_sender: None,
            before_receive: None,
            event_sender,
            event_receiver,
        }
    }
    /// Sets the yamux stream id.
    pub fn stream_id(mut self, id: StreamId) -> Self {
        self.id = id;
        self
    }
    /// Sets the protocol id.
    pub fn proto_id(mut self, id: ProtocolId) -> Self {
        self.proto_id = id;
        self
    }
    /// Sets the session config (buffer limits).
    pub fn config(mut self, config: SessionConfig) -> Self {
        self.config = config;
        self
    }
    /// Sets whether unsent session events are kept after close.
    pub fn keep_buffer(mut self, keep: bool) -> Self {
        self.keep_buffer = keep;
        self
    }
    /// Sets the optional service-level protocol handle channel.
    pub fn service_proto_sender(mut self, sender: Option<Buffer<ServiceProtocolEvent>>) -> Self {
        self.service_proto_sender = sender;
        self
    }
    /// Sets the optional session-level protocol handle channel.
    pub fn session_proto_sender(mut self, sender: Option<Buffer<SessionProtocolEvent>>) -> Self {
        self.session_proto_sender = sender;
        self
    }
    /// Sets the optional inbound-frame hook.
    pub fn before_receive(mut self, f: Option<BeforeReceive>) -> Self {
        self.before_receive = f;
        self
    }
    /// Consumes the builder and wraps the framed sub stream.
    pub fn build<U>(self, substream: Framed<StreamHandle, U>) -> Substream<U>
    where
        U: Codec,
    {
        let SubstreamBuilder {
            id,
            proto_id,
            keep_buffer,
            config,
            context,
            service_proto_sender,
            session_proto_sender,
            before_receive,
            event_sender,
            event_receiver,
        } = self;
        Substream {
            substream,
            id,
            proto_id,
            config,
            context,
            high_write_buf: VecDeque::new(),
            write_buf: VecDeque::new(),
            dead: false,
            keep_buffer,
            event_sender: Buffer::new(event_sender),
            event_receiver,
            service_proto_sender,
            session_proto_sender,
            before_receive,
        }
    }
}
/* Code organization under read-write separation */
// Write half of a protocol stream when read/write are split: only forwards
// session commands to the wire; the read half is `SubstreamReadPart`.
pub(crate) struct SubstreamWritePart<U> {
    /// Write half of the yamux sub stream, framed by the protocol codec.
    substream: FramedWrite<crate::runtime::WriteHalf<StreamHandle>, U>,
    /// Yamux stream id.
    id: StreamId,
    /// Protocol id this stream carries.
    proto_id: ProtocolId,
    /// Set once the stream is closed or errored; polling then terminates.
    dead: bool,
    config: SessionConfig,
    /// The buffer will be prioritized for send to underlying network
    high_write_buf: VecDeque<bytes::Bytes>,
    // The buffer which will send to underlying network
    write_buf: VecDeque<bytes::Bytes>,
    /// Send event to session
    event_sender: Buffer<ProtocolEvent>,
    /// Receive events from session
    event_receiver: priority_mpsc::Receiver<ProtocolEvent>,
    /// Shared session context (pending data counters, closed flag, …).
    context: Arc<SessionContext>,
}
impl<U> SubstreamWritePart<U>
where
    U: Codec + Unpin,
{
    /// Requeue a frame at the front of its priority queue (preserves order).
    fn push_front(&mut self, priority: Priority, frame: bytes::Bytes) {
        if priority.is_high() {
            self.high_write_buf.push_front(frame);
        } else {
            self.write_buf.push_front(frame);
        }
    }
    /// Queue a frame at the back of its priority queue.
    fn push_back(&mut self, priority: Priority, frame: bytes::Bytes) {
        if priority.is_high() {
            self.high_write_buf.push_back(frame);
        } else {
            self.write_buf.push_back(frame);
        }
    }
    /// Sink `start_send` Ready -> data send to buffer
    /// Sink `start_send` NotReady -> buffer full need poll complete
    ///
    /// Returns `Ok(true)` when the sink was full and the frame was requeued.
    #[inline]
    fn send_inner(
        &mut self,
        cx: &mut Context,
        frame: bytes::Bytes,
        priority: Priority,
    ) -> Result<bool, io::Error> {
        let data_size = frame.len();
        let mut sink = Pin::new(&mut self.substream);
        match sink.as_mut().poll_ready(cx)? {
            Poll::Ready(()) => {
                sink.as_mut().start_send(frame)?;
                self.context.decr_pending_data_size(data_size);
                Ok(false)
            }
            Poll::Pending => {
                self.push_front(priority, frame);
                self.poll_complete(cx)?;
                Ok(true)
            }
        }
    }
    /// `Ok(true)` means the sink still has buffered data to flush.
    fn poll_complete(&mut self, cx: &mut Context) -> Result<bool, io::Error> {
        match Pin::new(&mut self.substream).poll_flush(cx) {
            Poll::Pending => Ok(true),
            Poll::Ready(res) => res.map(|_| false),
        }
    }
    /// Send data to the lower `yamux` sub stream
    fn send_data(&mut self, cx: &mut Context) -> Result<(), io::Error> {
        while let Some(frame) = self.high_write_buf.pop_front() {
            if self.send_inner(cx, frame, Priority::High)? {
                return Ok(());
            }
        }
        while let Some(frame) = self.write_buf.pop_front() {
            if self.send_inner(cx, frame, Priority::Normal)? {
                return Ok(());
            }
        }
        self.poll_complete(cx)?;
        Ok(())
    }
    /// Push buffered session events and frames toward their destinations.
    #[inline]
    fn flush(&mut self, cx: &mut Context) -> Result<(), io::Error> {
        self.poll_complete(cx)?;
        if !self.event_sender.is_empty()
            || !self.write_buf.is_empty()
            || !self.high_write_buf.is_empty()
        {
            self.output(cx);
            // Was a needless identity `match` over this result.
            self.send_data(cx)
        } else {
            Ok(())
        }
    }
    /// Handling commands send by session
    fn handle_proto_event(&mut self, cx: &mut Context, event: ProtocolEvent, priority: Priority) {
        match event {
            ProtocolEvent::Message { data, .. } => {
                self.push_back(priority, data);
                if let Err(err) = self.send_data(cx) {
                    // Whether it is a read send error or a flush error,
                    // the most essential problem is that there is a problem with the external network.
                    // Close the protocol stream directly.
                    debug!(
                        "protocol [{}] close because of extern network",
                        self.proto_id
                    );
                    self.output_event(
                        cx,
                        ProtocolEvent::Error {
                            id: self.id,
                            proto_id: self.proto_id,
                            error: err,
                        },
                    );
                    self.dead = true;
                }
            }
            ProtocolEvent::Close { .. } => {
                self.write_buf.clear();
                self.dead = true;
            }
            _ => (),
        }
    }
    /// Poll one command from the session; applies backpressure when the write
    /// buffer exceeds the configured event size.
    fn recv_event(&mut self, cx: &mut Context) -> Poll<Option<()>> {
        if self.dead {
            return Poll::Ready(None);
        }
        if self.write_buf.len() > self.config.send_event_size() {
            return Poll::Pending;
        }
        match Pin::new(&mut self.event_receiver).as_mut().poll_next(cx) {
            Poll::Ready(Some((priority, event))) => {
                self.handle_proto_event(cx, event, priority);
                Poll::Ready(Some(()))
            }
            Poll::Ready(None) => {
                // Must be session close
                self.dead = true;
                if let Poll::Ready(Err(e)) = Pin::new(self.substream.get_mut()).poll_shutdown(cx) {
                    log::trace!("sub stream poll shutdown err {}", e)
                }
                Poll::Ready(None)
            }
            Poll::Pending => Poll::Pending,
        }
    }
    /// When send or receive message error, output error and close stream
    fn error_close(&mut self, cx: &mut Context, error: io::Error) {
        self.dead = true;
        // Disconnect-style errors are expected on remote close; don't report them.
        match error.kind() {
            ErrorKind::BrokenPipe
            | ErrorKind::ConnectionAborted
            | ErrorKind::ConnectionReset
            | ErrorKind::NotConnected
            | ErrorKind::UnexpectedEof => return,
            _ => (),
        }
        self.event_sender.push(ProtocolEvent::Error {
            id: self.id,
            proto_id: self.proto_id,
            error,
        });
        self.close_proto_stream(cx);
    }
    /// Shuts the write half down and tells the session this stream closed.
    fn close_proto_stream(&mut self, cx: &mut Context) {
        self.event_receiver.close();
        if let Poll::Ready(Err(e)) = Pin::new(self.substream.get_mut()).poll_shutdown(cx) {
            log::trace!("sub stream poll shutdown err {}", e)
        }
        if !self.context.closed.load(Ordering::SeqCst) {
            let (mut sender, mut events) = self.event_sender.take();
            events.push_back(ProtocolEvent::Close {
                id: self.id,
                proto_id: self.proto_id,
            });
            crate::runtime::spawn(async move {
                let mut iter = iter(events).map(Ok);
                if let Err(e) = sender.send_all(&mut iter).await {
                    debug!("stream close event send to session error: {:?}", e)
                }
            });
        } else {
            self.output(cx);
        }
    }
    /// Send event to user
    #[inline]
    fn output_event(&mut self, cx: &mut Context, event: ProtocolEvent) {
        self.event_sender.push(event);
        self.output(cx);
    }
    #[inline]
    fn output(&mut self, cx: &mut Context) {
        if let SendResult::Disconnect = self.event_sender.try_send(cx) {
            debug!("proto send to session error: disconnect, may be kill by remote");
            self.dead = true;
        }
    }
}
impl<U> Stream for SubstreamWritePart<U>
where
    U: Codec + Unpin,
{
    type Item = ();
    /// Drives the write half one step: checks liveness, flushes, then polls
    /// for the next session command. Yields `None` once the stream is dead.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        // double check here
        if self.dead || self.context.closed.load(Ordering::SeqCst) {
            debug!(
                "Substream({}) finished, self.dead || self.context.closed.load(Ordering::SeqCst), head",
                self.id
            );
            self.close_proto_stream(cx);
            return Poll::Ready(None);
        }
        if let Err(err) = self.flush(cx) {
            debug!(
                "Substream({}) finished with flush error: {:?}",
                self.id, err
            );
            self.error_close(cx, err);
            return Poll::Ready(None);
        }
        debug!(
            "Substream({}) write buf: {}, read buf: {}",
            self.id,
            self.write_buf.len(),
            self.event_sender.len()
        );
        let is_pending = self.recv_event(cx).is_pending();
        if is_pending {
            Poll::Pending
        } else {
            Poll::Ready(Some(()))
        }
    }
}
/// Protocol Stream read part
pub struct SubstreamReadPart {
    /// Read half of the yamux sub stream, decoded by the protocol codec.
    pub(crate) substream:
        FramedRead<crate::runtime::ReadHalf<StreamHandle>, Box<dyn Codec + Send + 'static>>,
    /// Optional hook applied to each decoded frame before it is yielded.
    pub(crate) before_receive: Option<BeforeReceive>,
    /// Protocol id this read half belongs to.
    pub(crate) proto_id: ProtocolId,
    /// Yamux stream id.
    pub(crate) stream_id: StreamId,
    /// Negotiated protocol version string.
    pub(crate) version: String,
    /// Used by `Drop` to tell the session this read half is gone.
    pub(crate) close_sender: priority_mpsc::Sender<ProtocolEvent>,
}
impl SubstreamReadPart {
    /// The protocol id this read half belongs to.
    pub fn protocol_id(&self) -> ProtocolId {
        self.proto_id
    }
    /// The protocol version negotiated during the open handshake.
    pub fn version(&self) -> &str {
        &self.version
    }
}
impl Drop for SubstreamReadPart {
    fn drop(&mut self) {
        // `drop` cannot be async, so the close notification is sent from a
        // spawned task; the result is deliberately ignored because the session
        // may already be gone.
        let mut sender = self.close_sender.clone();
        let id = self.stream_id;
        let pid = self.proto_id;
        crate::runtime::spawn(async move {
            let _ignore = sender
                .send(ProtocolEvent::Close { id, proto_id: pid })
                .await;
        });
    }
}
impl Stream for SubstreamReadPart {
    type Item = Result<bytes::Bytes, io::Error>;
    /// Yields decoded frames, running each through the optional
    /// `before_receive` hook first; hook failures surface as item errors.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        let polled = match self.substream.poll_next_unpin(cx) {
            Poll::Pending => return Poll::Pending,
            Poll::Ready(item) => item,
        };
        let raw = match polled {
            None => return Poll::Ready(None),
            Some(Err(err)) => return Poll::Ready(Some(Err(err))),
            Some(Ok(data)) => data,
        };
        let data = match self.before_receive {
            Some(ref hook) => match hook(raw) {
                Ok(data) => data,
                Err(err) => return Poll::Ready(Some(Err(err))),
            },
            None => raw.freeze(),
        };
        Poll::Ready(Some(Ok(data)))
    }
}
// Builder for `SubstreamWritePart`: collects ids, config and channels before
// the framed write half is available.
pub(crate) struct SubstreamWritePartBuilder {
    /// Yamux stream id for the stream being built.
    id: StreamId,
    /// Protocol id the stream will carry.
    proto_id: ProtocolId,
    config: SessionConfig,
    context: Arc<SessionContext>,
    /// Send event to session
    event_sender: mpsc::Sender<ProtocolEvent>,
    /// Receive events from session
    event_receiver: priority_mpsc::Receiver<ProtocolEvent>,
}
impl SubstreamWritePartBuilder {
    /// Creates a builder wired to the session's event channels, with all
    /// other settings at their defaults.
    pub fn new(
        event_sender: mpsc::Sender<ProtocolEvent>,
        event_receiver: priority_mpsc::Receiver<ProtocolEvent>,
        context: Arc<SessionContext>,
    ) -> Self {
        SubstreamWritePartBuilder {
            id: 0,
            proto_id: 0.into(),
            config: SessionConfig::default(),
            context,
            event_sender,
            event_receiver,
        }
    }
    /// Sets the yamux stream id.
    pub fn stream_id(mut self, id: StreamId) -> Self {
        self.id = id;
        self
    }
    /// Sets the protocol id.
    pub fn proto_id(mut self, id: ProtocolId) -> Self {
        self.proto_id = id;
        self
    }
    /// Sets the session config (buffer limits).
    pub fn config(mut self, config: SessionConfig) -> Self {
        self.config = config;
        self
    }
    /// Consumes the builder and wraps the framed write half.
    pub fn build<U>(
        self,
        substream: FramedWrite<crate::runtime::WriteHalf<StreamHandle>, U>,
    ) -> SubstreamWritePart<U>
    where
        U: Codec,
    {
        let SubstreamWritePartBuilder {
            id,
            proto_id,
            config,
            context,
            event_sender,
            event_receiver,
        } = self;
        SubstreamWritePart {
            substream,
            id,
            proto_id,
            config,
            context,
            high_write_buf: VecDeque::new(),
            write_buf: VecDeque::new(),
            dead: false,
            event_sender: Buffer::new(event_sender),
            event_receiver,
        }
    }
}
| 30.7666 | 104 | 0.521352 |
f8014cb2ad9af38898ce7fcfb1e817275a10180c | 1,768 | use std::env::*;
use std::ffi::{OsString, OsStr};
use rand::{thread_rng, Rng};
use rand::distributions::Alphanumeric;
fn make_rand_name() -> OsString {
    // 10 random alphanumeric characters keep collisions with real variables
    // vanishingly unlikely.
    let suffix: String = thread_rng().sample_iter(&Alphanumeric).take(10).collect();
    let name = OsString::from(format!("TEST{}", suffix));
    // A freshly generated name must not already exist in the environment.
    assert!(var_os(&name).is_none());
    name
}
/// Asserts that an environment lookup result equals the expected UTF-8 value.
/// Uses `Option::as_deref` instead of the manual `&**`/double-map dance.
fn eq(a: Option<OsString>, b: Option<&str>) {
    assert_eq!(a.as_deref(), b.map(OsStr::new));
}
#[test]
fn test_set_var() {
    let name = make_rand_name();
    set_var(&name, "VALUE");
    // A freshly set variable must read back with the same value.
    eq(var_os(&name), Some("VALUE"));
}
#[test]
fn test_remove_var() {
    let name = make_rand_name();
    set_var(&name, "VALUE");
    remove_var(&name);
    // Once removed, the variable must no longer be visible.
    eq(var_os(&name), None);
}
#[test]
fn test_set_var_overwrite() {
    let name = make_rand_name();
    set_var(&name, "1");
    set_var(&name, "2");
    // The second write wins.
    eq(var_os(&name), Some("2"));
    set_var(&name, "");
    // Overwriting with the empty string keeps the variable defined (empty).
    eq(var_os(&name), Some(""));
}
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
fn test_var_big() {
    // 100 chunks of ten 'a's — the same 1000-byte value the old
    // push_str loop produced.
    let value = "aaaaaaaaaa".repeat(100);
    let name = make_rand_name();
    set_var(&name, &value);
    eq(var_os(&name), Some(&value));
}
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
fn test_env_set_get_huge() {
    let name = make_rand_name();
    // A 10 KB value round-trips and can be removed again.
    let value: String = std::iter::repeat('x').take(10000).collect();
    set_var(&name, &value);
    eq(var_os(&name), Some(&value));
    remove_var(&name);
    eq(var_os(&name), None);
}
#[test]
fn test_env_set_var() {
    let name = make_rand_name();
    // Snapshot the environment iterator *before* the variable is set.
    let mut before = vars_os();
    set_var(&name, "VALUE");
    let is_new_pair = |k: &OsStr, v: &OsStr| k == name.as_os_str() && v == OsStr::new("VALUE");
    // The pre-set snapshot must not observe the new variable…
    assert!(!before.any(|(k, v)| is_new_pair(&k, &v)));
    // …but a fresh iterator must.
    assert!(vars_os().any(|(k, v)| is_new_pair(&k, &v)));
}
| 21.047619 | 73 | 0.525452 |
b9137360362a8b1afb900e87c1f7adb8677f2159 | 8,677 | use std::error;
use std::collections::HashMap;
use std::convert::TryInto;
use tinyjson::JsonValue;
use stepflow::{Error, SessionId, data::InvalidValue};
use stepflow::object::IdError;
use stepflow_json::{parse_session_json, parse_statedata_json, json_value_from_statedata, StepFlowParseError};
use crate::session_advance_blockedon::advance_blockedon_to_json_obj;
use super::session_store::{new_session, get_session_store, get_session_store_mut};
/// Builds a new session from a JSON definition, registers it in the global
/// session store, and returns its id.
///
/// `allow_implicit_var` is forwarded to `parse_session_json`; presumably it
/// permits vars that were not declared explicitly — confirm against the
/// stepflow-json parser docs.
pub fn create_session(json: &str, allow_implicit_var: bool) -> Result<SessionId, Box<dyn error::Error>> {
    // FUTURE: pre-alloc the session size
    let session_id = new_session()?;
    let mut session_store = get_session_store_mut()?;
    let mut session = session_store.get_mut(&session_id).ok_or_else(|| Error::SessionId(IdError::IdMissing(session_id)))?;
    parse_session_json(&mut session, json, allow_implicit_var)?;
    Ok(session_id)
}
/// Errors surfaced while advancing a session.
#[derive(Debug)]
pub enum AdvanceSessionError {
    /// The step-output JSON could not be parsed into state data.
    ParseError(StepFlowParseError),
    /// An error reported by the stepflow engine itself.
    Error(stepflow::Error),
    /// Any other failure, boxed.
    Other(Box<dyn std::error::Error>),
}
impl std::fmt::Display for AdvanceSessionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self) // FUTURE: we can do better
}
}
// Marker impl so `AdvanceSessionError` can be boxed as `dyn Error` via `?`.
impl std::error::Error for AdvanceSessionError {}
fn sessionid_from_val(session_id_val: i32) -> Result<SessionId, Error> {
let session_id_val: u16 = session_id_val.try_into().map_err(|_| Error::InvalidValue(InvalidValue::WrongValue))?;
Ok(SessionId::new(session_id_val))
}
pub fn advance_session(session_id_val: i32, step_output_json: Option<&str>) -> Result<HashMap<String, JsonValue>, Box<dyn std::error::Error>>
{
// get step output data (if any)
let session_id = sessionid_from_val(session_id_val).map_err(|e| AdvanceSessionError::Error(e))?;
let mut session_store = get_session_store_mut().map_err(|e| AdvanceSessionError::Other(e))?;
let session = session_store.get_mut(&session_id).ok_or_else(|| AdvanceSessionError::Error(Error::SessionId(IdError::IdMissing(session_id))))?;
let state_data = match step_output_json {
None => None,
Some(s) => {
let parsed = parse_statedata_json(s, session.var_store())
.map_err(|e| AdvanceSessionError::ParseError(e))?;
Some(parsed)
}
};
let current_step = session
.current_step()
.map_err(|e| AdvanceSessionError::Error(e))?
.clone();
let step_output = state_data.map(|state_data| {
(¤t_step, state_data)
});
let advance_result = session.advance(step_output).map_err(|e| AdvanceSessionError::Error(e))?;
let block_on_json = advance_blockedon_to_json_obj(&advance_result, session);
Ok(block_on_json)
}
pub fn get_statedata(session_id_val: i32) -> Result<JsonValue, Box<dyn std::error::Error>> {
let session_id = sessionid_from_val(session_id_val).map_err(|e| AdvanceSessionError::Error(e))?;
let session_store = get_session_store()?;
let session = session_store.get(&session_id).ok_or_else(|| AdvanceSessionError::Error(Error::SessionId(IdError::IdMissing(session_id))))?;
let json_value = json_value_from_statedata(session.state_data(), session.var_store())?;
Ok(json_value)
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use tinyjson::JsonValue;
use stepflow::data::{StringVar, StringValue};
use stepflow::AdvanceBlockedOn;
use stepflow::prelude::*;
use super::create_session;
use stepflow_json::statedata_from_jsonval_obj;
use crate::session_store::{get_session_store, get_session_store_mut};
// Session definition shared by the tests: two substeps ("name", "email"),
// a templated-URI action for every step, and a SetData action on "email"
// that only fires after the second attempt.
const JSON: &str = r#"
{
"vars": {
"first_name": "String",
"last_name": "String",
"email": "Email",
"email_waited": "True",
"nothing": "Bool"
},
"steps": {
"$root": {
"substeps": ["name", "email"],
"outputs": ["first_name","last_name","email", "email_waited"]
},
"name": {
"outputs": ["first_name","last_name"]
},
"email": {
"outputs": ["email", "email_waited"]
}
},
"actions": {
"$all": {
"type": "UriStringTemplate",
"template": "/base-path/{{step}}"
},
"email": {
"type": "SetData",
"stateData": {
"email_waited": "true"
},
"afterAttempt": 2
}
}
}"#;
// End-to-end walk of the JSON-defined session: create it, advance through
// the "name" and "email" steps, and check each AdvanceBlockedOn result.
#[test]
fn deserialize() {
    let session_id = create_session(JSON, false).unwrap();
    let mut session_store = get_session_store_mut().unwrap();
    let session = session_store.get_mut(&session_id).unwrap();
    let name_stepid = session.step_store().get_by_name("name").unwrap().id().clone();
    let email_stepid = session.step_store().get_by_name("email").unwrap().id().clone();
    let _firstname_var_id = session.var_store().get_by_name("first_name").unwrap().id().clone();
    let _email_waited_varid = session.var_store().get_by_name("email_waited").unwrap().id().clone();
    let uri_action_id = session.action_store().id_from_name("$all").unwrap().clone();
    // advance to first step (name)
    let name_advance = session.advance(None).unwrap();
    assert_eq!(name_advance, AdvanceBlockedOn::ActionStartWith(uri_action_id.clone(), "/base-path/name".parse::<StringValue>().unwrap().boxed()));
    // try advancing without setting name and fail
    let name_advance_fail = session.advance(None).unwrap();
    assert_eq!(
        name_advance_fail,
        AdvanceBlockedOn::ActionStartWith(uri_action_id.clone(), "/base-path/name".parse::<StringValue>().unwrap().boxed()));
    // advance to next step (email) - fail setdata (attempt #1) so get URI action result
    let mut data_name = HashMap::new();
    data_name.insert("first_name".to_owned(), JsonValue::String("billy".to_owned()));
    data_name.insert("last_name".to_owned(), JsonValue::String("bob".to_owned()));
    let statedata_name = statedata_from_jsonval_obj(&data_name, session.var_store()).unwrap();
    let name_advance_success = session.advance(Some((&name_stepid, statedata_name))).unwrap();
    assert_eq!(name_advance_success, AdvanceBlockedOn::ActionStartWith(uri_action_id.clone(), "/base-path/email".parse::<StringValue>().unwrap().boxed()));
    // put in email and try advancing -- fail setdata (attempt #2) because email waited setdata action hasn't fired so get URI action result
    let mut data_email = HashMap::new();
    data_email.insert("email".to_owned(), JsonValue::String("[email protected]".to_owned()));
    let statedata_email = statedata_from_jsonval_obj(&data_email, session.var_store()).unwrap();
    let name_advance_success = session.advance(Some((&email_stepid, statedata_email))).unwrap();
    assert_eq!(name_advance_success, AdvanceBlockedOn::ActionStartWith(uri_action_id.clone(), "/base-path/email".parse::<StringValue>().unwrap().boxed()));
    // try advancing again -- success with setdata firing and we're finished
    let name_advance_success = session.advance(None).unwrap();
    assert_eq!(name_advance_success, AdvanceBlockedOn::FinishedAdvancing);
}
// Every call to create_session must mint a distinct session id.
#[test]
fn session_ids() {
    let first = create_session(JSON, false).unwrap();
    let second = create_session(JSON, false).unwrap();
    assert_ne!(first, second);
}
// Steps reference vars ("test_input"/"test_output") that are never declared:
// creation must fail when implicit vars are disallowed, and must implicitly
// declare them as StringVar when allowed.
#[test]
fn implicit_vars() {
    let json = r#"
{
"steps": {
"$root": {
"substeps": ["step1"],
"outputs": ["test_output"]
},
"step1": { "inputs": ["test_input"], "outputs": ["test_output"] }
},
"actions": {
"$all": { "type": "HtmlForm" }
}
}
"#;
    let json = json.to_string();
    // expect error when we don't allow implicit var
    assert!(matches!(create_session(&json[..], false), Err(_)));
    // create session
    let session_id = create_session(&json[..], true).unwrap();
    let session_store = get_session_store().unwrap();
    let session = session_store.get(&session_id).unwrap();
    assert_eq!(session.var_store().iter_names().count(), 2);
    let input_var = session.var_store().get_by_name("test_input").unwrap();
    assert!(input_var.is::<StringVar>());
    let output_var = session.var_store().get_by_name("test_output").unwrap();
    assert!(output_var.is::<StringVar>());
}
} | 41.917874 | 159 | 0.637548 |
2959be6a18692fa4dc287cc52b0cd3b2c126acfa | 8,876 |
// TODO: Remove all #[allow(dead_code)]
use ash::vk;
use gs::prelude::*;
use gsvk::prelude::common::*;
use gsvk::prelude::buffer::*;
use gsvk::prelude::pipeline::*;
use gsvk::prelude::command::*;
use gsvk::prelude::sync::*;
use gsvk::prelude::api::*;
use gsma::{ define_input, offset_of, vk_format, vertex_rate, data_size };
use std::path::{ Path, PathBuf };
// Asset locations for this example, relative to the crate root.
const MANIFEST_PATH: &'static str = "src/01.triangle/Gensokyo.toml";
// Pre-compiled SPIR-V shader binaries.
const VERTEX_SHADER_SPIRV_PATH : &'static str = "src/01.triangle/triangle.vert.spv";
const FRAGMENT_SHADER_SPIRV_PATH: &'static str = "src/01.triangle/triangle.frag.spv";
// Vertex layout fed to the pipeline: a 2D position plus an RGBA color,
// consumed per-vertex from binding 0 (locations 0 and 1).
define_input! {
    #[binding = 0, rate = vertex]
    struct Vertex {
        #[location = 0, format = vec2]
        pos: [f32; 2],
        #[location = 1, format = vec4]
        color: [f32; 4],
    }
}
// The three triangle corners, one red, one green, one blue.
const VERTEX_DATA: [Vertex; 3] = [
    Vertex { pos: [ 0.0, -0.5], color: [1.0, 0.0, 0.0, 1.0], },
    Vertex { pos: [ 0.5, 0.5], color: [0.0, 1.0, 0.0, 1.0], },
    Vertex { pos: [-0.5, 0.5], color: [0.0, 0.0, 1.0, 1.0], },
];
/// All CPU/GPU resources owned by the triangle example.
struct TriangleProcedure {
    // CPU-side copy of the triangle vertices.
    vertex_data: Vec<Vertex>,
    // Never read after setup; kept so the host-visible allocation that
    // backs `vertex_buffer` stays alive — TODO confirm against gsvk docs.
    #[allow(dead_code)]
    vertex_storage: GsBufferRepository<Host>,
    vertex_buffer : GsVertexBuffer,
    graphics_pipeline: GsPipeline<Graphics>,
    command_pool   : GsCommandPool,
    // One prerecorded command buffer per swapchain frame.
    command_buffers: Vec<GsCommandBuffer>,
    // One semaphore per frame, signaled when rendering of that frame is done.
    present_availables: Vec<GsSemaphore>,
}
impl TriangleProcedure {
/// Uploads the triangle vertices, then builds the pipeline and all
/// per-frame resources that depend on it.
fn new(initializer: AssetInitializer) -> GsResult<TriangleProcedure> {
    let vertex_data = VERTEX_DATA.to_vec();
    let (vertex_buffer, vertex_storage) = TriangleProcedure::assets(&initializer, &vertex_data)?;
    let graphics_pipeline = TriangleProcedure::pipelines(&initializer)?;
    let present_availables = TriangleProcedure::sync_resources(&initializer, &graphics_pipeline)?;
    let (command_pool, command_buffers) =
        TriangleProcedure::commands(&initializer, &graphics_pipeline, &vertex_buffer, &vertex_data)?;
    Ok(TriangleProcedure {
        vertex_data, vertex_storage, vertex_buffer,
        graphics_pipeline,
        command_pool, command_buffers,
        present_availables,
    })
}
/// Allocates a host-visible vertex buffer and uploads `vertex_data` into it.
/// Returns the buffer handle plus the repository that owns the backing
/// memory (stored on `TriangleProcedure` to keep the allocation alive).
fn assets(initializer: &AssetInitializer, vertex_data: &Vec<Vertex>) -> GsResult<(GsVertexBuffer, GsBufferRepository<Host>)> {
    // vertex buffer
    let mut vertex_allocator = GsBufferAllocator::new(initializer, BufferStorageType::HOST);
    let vertex_info = GsVertexBuffer::new(data_size!(Vertex), vertex_data.len());
    let vertex_index = vertex_allocator.assign(vertex_info)?;
    let buffer_distributor = vertex_allocator.allocate()?;
    let vertex_buffer = buffer_distributor.acquire(vertex_index);
    let mut vertex_storage = buffer_distributor.into_repository();
    // Copy the CPU-side vertices into the freshly allocated buffer.
    vertex_storage.data_uploader()?
        .upload(&vertex_buffer, vertex_data)?
        .finish()?;
    Ok((vertex_buffer, vertex_storage))
}
/// Builds the render pass (a single presentable color attachment with
/// external->subpass->external dependencies) and the graphics pipeline
/// that uses the triangle's SPIR-V shaders.
fn pipelines(initializer: &AssetInitializer) -> GsResult<GsPipeline<Graphics>> {
    // shaders
    let vertex_shader = GsShaderCI::from_spirv(
        GsPipelineStage::VERTEX,
        Path::new(VERTEX_SHADER_SPIRV_PATH),
        None);
    let fragment_shader = GsShaderCI::from_spirv(
        GsPipelineStage::FRAGMENT,
        Path::new(FRAGMENT_SHADER_SPIRV_PATH),
        None);
    let shader_infos = vec![vertex_shader, fragment_shader];
    let vertex_input_desc = Vertex::desc();
    // pipeline
    let mut render_pass_builder = GsRenderPass::new(initializer);
    let first_subpass = render_pass_builder.new_subpass();
    let color_attachment = RenderAttachmentCI::<Present>::new(initializer);
    let _attachment_index = render_pass_builder.add_attachment(color_attachment, first_subpass);
    // Entry dependency: wait for the previous use of the image before
    // writing color output in this subpass.
    let dependency0 = RenderDependencyCI::new(SubpassStage::BeginExternal, SubpassStage::AtIndex(first_subpass))
        .stage(vk::PipelineStageFlags::BOTTOM_OF_PIPE, vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT)
        .access(vk::AccessFlags::MEMORY_READ, vk::AccessFlags::COLOR_ATTACHMENT_READ | vk::AccessFlags::COLOR_ATTACHMENT_WRITE)
        .with_flags(vk::DependencyFlags::BY_REGION);
    render_pass_builder.add_dependency(dependency0);
    // Exit dependency: mirror of the above, back to external (presentation).
    let dependency1 = RenderDependencyCI::new(SubpassStage::AtIndex(first_subpass), SubpassStage::EndExternal)
        .stage(vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT, vk::PipelineStageFlags::BOTTOM_OF_PIPE)
        .access(vk::AccessFlags::COLOR_ATTACHMENT_READ | vk::AccessFlags::COLOR_ATTACHMENT_WRITE, vk::AccessFlags::MEMORY_READ)
        .with_flags(vk::DependencyFlags::BY_REGION);
    render_pass_builder.add_dependency(dependency1);
    let render_pass = render_pass_builder.build()?;
    let pipeline_config = GfxPipelineConfig::new(shader_infos, vertex_input_desc, render_pass, initializer.screen_dimension())
        .finish();
    let mut pipeline_builder = GfxPipelineBuilder::new(initializer)?;
    let graphics_pipeline = pipeline_builder.build(pipeline_config)?;
    Ok(graphics_pipeline)
}
/// Creates one "present ready" semaphore per swapchain frame.
fn sync_resources(initializer: &AssetInitializer, pipeline: &GsPipeline<Graphics>) -> GsResult<Vec<GsSemaphore>> {
    // sync
    (0..pipeline.frame_count())
        .map(|_| GsSemaphore::new(initializer))
        .collect()
}
/// Records one command buffer per swapchain frame: each one replays the
/// render pass on its framebuffer, binds the pipeline and vertex buffer,
/// and issues a single non-instanced draw of every triangle vertex.
fn commands(initializer: &AssetInitializer, pipeline: &GsPipeline<Graphics>, vertex_buffer: &GsVertexBuffer, data: &Vec<Vertex>) -> GsResult<(GsCommandPool, Vec<GsCommandBuffer>)> {
    let command_pool = GsCommandPool::new(initializer, DeviceQueueIdentifier::Graphics)?;
    let mut command_buffers = vec![];
    // One unitary command buffer per frame.
    let command_buffer_count = pipeline.frame_count();
    let raw_commands = command_pool.allocate(CmdBufferUsage::UnitaryCommand, command_buffer_count)?;
    for (frame_index, command) in raw_commands.into_iter().enumerate() {
        let mut recorder = GsCmdRecorder::<Graphics>::new(initializer, pipeline, command);
        recorder.begin_record(vk::CommandBufferUsageFlags::SIMULTANEOUS_USE)?
            .begin_render_pass(pipeline, frame_index)
            .bind_pipeline()
            .bind_vertex_buffers(0, &[vertex_buffer])
            .draw(data.len() as vkuint, 1, 0, 0)
            .end_render_pass();
        let command_recorded = recorder.end_record()?;
        command_buffers.push(command_recorded);
    }
    Ok((command_pool, command_buffers))
}
}
impl GraphicsRoutine for TriangleProcedure {
/// Submits the prerecorded commands for `image_index`: waits on the
/// acquired-image semaphore and signals the matching present semaphore.
fn draw(&mut self, device: &GsDevice, device_available: &GsFence, image_available: &GsSemaphore, image_index: usize, _: f32) -> GsResult<&GsSemaphore> {
    let bundle = QueueSubmitBundle {
        wait_semaphores: &[image_available],
        sign_semaphores: &[&self.present_availables[image_index]],
        wait_stages    : &[vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT],
        commands       : &[&self.command_buffers[image_index]],
    };
    device.logic.submit_single(&bundle, Some(device_available), DeviceQueueIdentifier::Graphics)?;
    Ok(&self.present_availables[image_index])
}
/// Rebuilds everything that depends on the (possibly recreated) swapchain:
/// pipeline, per-frame semaphores, and the command buffers.
fn reload_res(&mut self, initializer: AssetInitializer) -> GsResult<()> {
    self.graphics_pipeline = TriangleProcedure::pipelines(&initializer)?;
    self.present_availables = TriangleProcedure::sync_resources(&initializer, &self.graphics_pipeline)?;
    let (pool, buffers) = TriangleProcedure::commands(&initializer, &self.graphics_pipeline, &self.vertex_buffer, &self.vertex_data)?;
    self.command_pool = pool;
    self.command_buffers = buffers;
    Ok(())
}
/// Escape terminates the example; anything else keeps rendering.
fn react_input(&mut self, inputer: &ActionNerve, _: f32) -> SceneAction {
    if inputer.is_key_pressed(GsKeycode::ESCAPE) {
        SceneAction::Terminal
    } else {
        SceneAction::Rendering
    }
}
}
/// Entry point: load the manifest, build the routine, and run it until it
/// finishes or fails.
fn main() {
    // TODO: handle unwrap().
    let manifest = PathBuf::from(MANIFEST_PATH);
    let mut program_context = ProgramContext::new(Some(manifest)).unwrap();
    let builder = program_context.routine().unwrap();
    let asset_loader = builder.assets_loader();
    let routine = TriangleProcedure::new(asset_loader).unwrap();
    let routine_flow = builder.build(routine);
    if let Err(err) = routine_flow.launch(program_context) {
        panic!("[Error] {}", err)
    }
}
| 36.377049 | 185 | 0.6637 |
8753c84ac22f5a5b72dc2a9d82eaea7ea6aa2c9c | 1,146 | // utils.rs
use std::ops::Range;
/// This function is used to create a range for slicing up Lump data
/// It takes a start position and a width and creates a range of (x .. (x + w))
pub fn packet_range(start: usize, width: usize) -> Range<usize> {
    // Half-open range covering `width` indices beginning at `start`; the
    // parentheses around the original expression were redundant.
    start..start + width
}
/// Functions that can convert a grouping of bytes into different data types
/// Multiple types are covered to avoid re-using "as T" for conversions
///
/// Assembles a little-endian `u16`: `a` is the low byte, `b` the high byte.
pub fn u8_to_u16(a: u8, b: u8) -> u16 {
    // `|` instead of `+`: the operand bits never overlap, and the original
    // `(a as u16) << 0` shift was a no-op.
    u16::from(a) | (u16::from(b) << 8)
}
/// Assembles a little-endian `u32` from four bytes (`a` lowest … `d` highest).
pub fn u8_to_u32(a: u8, b: u8, c: u8, d: u8) -> u32 {
    // Same cleanup as `u8_to_u16`: lossless widening + non-overlapping ORs,
    // dropping the no-op `<< 0` shift.
    u32::from(a) | (u32::from(b) << 8) | (u32::from(c) << 16) | (u32::from(d) << 24)
}
/// Little-endian `i16`: builds the unsigned value, then reinterprets the
/// bits as two's-complement signed.
pub fn u8_to_i16(a: u8, b: u8) -> i16 {
    let unsigned = u8_to_u16(a, b);
    unsigned as i16
}
// testing section for byte conversions go here
#[cfg(test)]
mod tests {
    use utils::*;
    const DATA1: [u8; 2] = [0, 0];
    const DATA2: [u8; 2] = [255, 255];
    // The previous test compared a value against itself (always true) and
    // never used DATA1/DATA2; these tests pin the actual conversions.
    #[test]
    fn test_u8_to_u16() {
        assert_eq!(u8_to_u16(DATA1[0], DATA1[1]), 0u16, "Conversion fail");
        assert_eq!(u8_to_u16(DATA2[0], DATA2[1]), 0xFFFFu16, "Conversion fail");
        // Little-endian: the first byte is the low-order byte.
        assert_eq!(u8_to_u16(0x34, 0x12), 0x1234u16, "Conversion fail");
    }
    #[test]
    fn test_u8_to_u32() {
        assert_eq!(u8_to_u32(DATA2[0], DATA2[1], DATA1[0], DATA1[1]), 0x0000_FFFFu32, "Conversion fail");
    }
    #[test]
    fn test_u8_to_i16() {
        assert_eq!(u8_to_i16(DATA1[0], DATA1[1]), 0i16, "Conversion fail");
        // 0xFFFF reinterpreted as two's complement is -1.
        assert_eq!(u8_to_i16(DATA2[0], DATA2[1]), -1i16, "Conversion fail");
    }
}
// end
| 24.382979 | 83 | 0.582897 |
ebb847b384c97efd52af7444d9fccbad94d528de | 3,279 | use bitcoin::hashes::Hash;
use bitcoin::{Address, PublicKey, WPubkeyHash, WScriptHash};
use clap;
use cmd;
use hal;
/// Top-level `address` subcommand group with its two children.
pub fn subcommand<'a>() -> clap::App<'a, 'a> {
    let group = cmd::subcommand_group("address", "work with addresses");
    group
        .subcommand(cmd_create())
        .subcommand(cmd_inspect())
}
/// Dispatches a parsed `address` invocation to its handler.
pub fn execute<'a>(matches: &clap::ArgMatches<'a>) {
    match matches.subcommand() {
        ("create", Some(sub)) => exec_create(sub),
        ("inspect", Some(sub)) => exec_inspect(sub),
        _ => unreachable!("clap prints help"),
    }
}
/// `hal address create`: derive addresses from a public key or a script.
/// Both options are declared optional here; `exec_create` enforces that at
/// least one of them is provided at runtime.
fn cmd_create<'a>() -> clap::App<'a, 'a> {
    cmd::subcommand("create", "create addresses").args(&cmd::opts_networks()).args(&[
        cmd::opt_yaml(),
        cmd::opt("pubkey", "a public key in hex").takes_value(true).required(false),
        cmd::opt("script", "a script in hex").takes_value(true).required(false),
    ])
}
/// Derives addresses for the selected network from either `--pubkey` or
/// `--script` and prints them in the requested output format.
fn exec_create<'a>(matches: &clap::ArgMatches<'a>) {
    let network = cmd::network(matches);
    let created = if let Some(pubkey_hex) = matches.value_of("pubkey") {
        let pubkey: PublicKey = pubkey_hex.parse().expect("invalid pubkey");
        hal::address::Addresses::from_pubkey(&pubkey, network)
    } else if let Some(script_hex) = matches.value_of("script") {
        let script_bytes = hex::decode(script_hex).expect("invalid script hex");
        let script = script_bytes.into();
        hal::address::Addresses::from_script(&script, network)
    } else {
        // Both options are optional in clap, so reaching here means the user
        // supplied neither. The old message only mentioned the pubkey even
        // though a script is equally accepted.
        panic!("Can't create addresses without a pubkey or a script");
    };
    cmd::print_output(matches, &created)
}
/// `hal address inspect`: takes a single required address argument.
fn cmd_inspect<'a>() -> clap::App<'a, 'a> {
    let args = [cmd::opt_yaml(), cmd::arg("address", "the address").required(true)];
    cmd::subcommand("inspect", "inspect addresses").args(&args)
}
/// Parses an address from the command line and prints its decomposition:
/// network, scriptPubKey, address type, and the hash/program field that
/// applies to that type.
fn exec_inspect<'a>(matches: &clap::ArgMatches<'a>) {
    let address_str = matches.value_of("address").expect("no address provided");
    let address: Address = address_str.parse().expect("invalid address format");
    let script_pk = address.script_pubkey();
    // Start with every type-specific field unset; the payload match below
    // fills in only those that apply.
    let mut info = hal::address::AddressInfo {
        network: address.network,
        script_pub_key: hal::tx::OutputScriptInfo {
            hex: Some(script_pk.to_bytes().into()),
            asm: Some(script_pk.asm()),
            address: None,
            type_: None,
        },
        type_: None,
        pubkey_hash: None,
        script_hash: None,
        witness_pubkey_hash: None,
        witness_script_hash: None,
        witness_program_version: None,
    };
    use bitcoin::util::address::Payload;
    match address.payload {
        Payload::PubkeyHash(pkh) => {
            info.type_ = Some("p2pkh".to_owned());
            info.pubkey_hash = Some(pkh);
        }
        Payload::ScriptHash(sh) => {
            info.type_ = Some("p2sh".to_owned());
            info.script_hash = Some(sh);
        }
        Payload::WitnessProgram {
            version,
            program,
        } => {
            let version = version.to_u8() as usize;
            info.witness_program_version = Some(version);
            if version == 0 {
                // Segwit v0: the program length decides the address type
                // (20-byte key hash vs 32-byte script hash).
                if program.len() == 20 {
                    info.type_ = Some("p2wpkh".to_owned());
                    info.witness_pubkey_hash =
                        Some(WPubkeyHash::from_slice(&program).expect("size 20"));
                } else if program.len() == 32 {
                    info.type_ = Some("p2wsh".to_owned());
                    info.witness_script_hash =
                        Some(WScriptHash::from_slice(&program).expect("size 32"));
                } else {
                    info.type_ = Some("invalid-witness-program".to_owned());
                }
            } else {
                // Unknown/future witness versions are reported, not rejected.
                info.type_ = Some("unknown-witness-program-version".to_owned());
            }
        }
    }
    cmd::print_output(matches, &info)
}
| 29.540541 | 82 | 0.663922 |
8aefb2fe68a171a2cf94039b3e72268bf02b11c1 | 332 | use super::intscript::Computer;
/// Day 5, part 2: run the intcode program with input value 5 via `siso`
/// (presumably "single input, single output" — confirm in `intscript`).
#[aoc(day5, part2)]
fn part2(input: &str) -> i64 {
    Computer::new_from_text(input).siso(5)
}
/// Day 5, part 1: run the program with input value 1 and return the last
/// value it output.
// NOTE(review): if `siso` wraps this add_input/run/last-output chain, this
// could become `siso(1)` to match `part2` — confirm before unifying.
#[aoc(day5, part1)]
fn part1(input: &str) -> i64 {
    Computer::new_from_text(input)
        .add_input(1)
        .run()
        .get_outputs()
        .pop_back()
        .unwrap()
}
| 19.529412 | 43 | 0.542169 |
4bd35fb60ced32a272f698d0ad59d140c5f93e1c | 362 | use std::io::Read;
/// Reads a string of '+' and '-' from stdin and prints
/// (count of '+') - (count of '-').
fn main() {
    let mut buff = String::new();
    std::io::stdin().read_to_string(&mut buff).unwrap();
    // The first whitespace-separated token is already a &str: the original
    // `parse::<String>()` round-trip was a needless allocation, and the two
    // `matches` passes scanned the token twice.
    let token = buff.split_whitespace().next().unwrap();
    let result = token.chars().fold(0i32, |acc, c| match c {
        '+' => acc + 1,
        '-' => acc - 1,
        _ => acc,
    });
    println!("{}", result);
}
| 21.294118 | 58 | 0.546961 |
d59d112aba9ebed2fb3db361af820bb083a4e8e5 | 6,658 | /*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
mod catchable;
mod joinable;
pub(crate) use catchable::Catchable;
pub(crate) use joinable::Joinable;
use super::Stream;
use crate::execution_step::lambda_applier::LambdaError;
use crate::JValue;
use air_interpreter_interface::CallResults;
use air_trace_handler::MergerApResult;
use air_trace_handler::TraceHandlerError;
use strum::IntoEnumIterator;
use strum_macros::EnumDiscriminants;
use strum_macros::EnumIter;
use thiserror::Error as ThisError;
use std::rc::Rc;
/// Errors arisen while executing AIR script.
///
/// NOTE: `to_error_code` derives the numeric error code from each variant's
/// position in this enum, so keep the declaration order stable and only
/// append new variants.
#[derive(ThisError, EnumDiscriminants, Debug)]
#[strum_discriminants(derive(EnumIter))]
pub(crate) enum ExecutionError {
    /// Semantic error in a call instruction.
    #[error("call should have service id specified by peer part or function part")]
    IncorrectCallTriplet,
    /// An error occurred while calling a local service via call_service.
    #[error("Local service error, ret_code is {0}, error message is '{1}'")]
    LocalServiceError(i32, Rc<String>),
    /// No value with this name is present in data.
    #[error("variable with name '{0}' isn't present in data")]
    VariableNotFound(String),
    /// Multiple values were found for this name.
    #[error("multiple variables found for name '{0}' in data")]
    MultipleVariablesFound(String),
    /// An error occurred while trying to apply a lambda to a value.
    #[error(transparent)]
    LambdaApplierError(#[from] LambdaError),
    /// An error occurred while trying to apply a lambda to an empty stream.
    #[error("lambda is applied to an empty stream")]
    EmptyStreamLambdaError,
    /// Provided JValue has a type incompatible with the requested one.
    #[error("expected JValue type '{1}', but got '{0}' JValue")]
    IncompatibleJValueType(JValue, &'static str),
    /// Fold state wasn't found for this iterator name.
    #[error("fold state not found for this iterable '{0}'")]
    FoldStateNotFound(String),
    /// Multiple fold states were found for this iterator name.
    #[error("multiple iterable values found for iterable name '{0}'")]
    MultipleIterableValues(String),
    /// A fold instruction must iterate over an array value.
    #[error("lambda '{1}' returned non-array value '{0}' for fold instruction")]
    FoldIteratesOverNonArray(JValue, String),
    /// Errors encountered while shadowing non-scalar values.
    #[error("variable with name '{0}' can't be shadowed, shadowing isn't supported for iterables")]
    IterableShadowing(String),
    /// Produced by a match to notify the enclosing xor that compared values aren't equal.
    #[error("match is used without corresponding xor")]
    MatchWithoutXorError,
    /// Produced by a mismatch instruction to notify the enclosing xor that its comparison failed.
    #[error("mismatch is used without corresponding xor")]
    MismatchWithoutXorError,
    /// Errors bubbled from a trace handler.
    #[error(transparent)]
    TraceError(#[from] TraceHandlerError),
    /// Insertion of a value into a stream failed because the stream has no corresponding generation.
    #[error("stream {0:?} doesn't have generation with number {1}, probably a supplied to the interpreter data is corrupted")]
    StreamDontHaveSuchGeneration(Stream, usize),
    /// A result from data doesn't match the instruction, e.g. an instruction
    /// could be applied to a stream, but the result has no generation in the source position.
    #[error("ap result {0:?} doesn't match corresponding instruction")]
    ApResultNotCorrespondToInstr(MergerApResult),
    /// Call results should be empty at the end of execution thanks to an execution invariant.
    #[error(
        "after finishing execution of supplied AIR, call results aren't empty: `{0:?}`, probably wrong call_id used"
    )]
    CallResultsNotEmpty(CallResults),
}
// Lets `?` lift a LambdaError straight into the Rc-wrapped error type.
impl From<LambdaError> for Rc<ExecutionError> {
    fn from(e: LambdaError) -> Self {
        let wrapped = ExecutionError::LambdaApplierError(e);
        Rc::new(wrapped)
    }
}
/// This macro is needed because it's impossible to implement
/// From<TraceHandlerError> for Rc<ExecutionError> due to the orphan rule.
// Maps only the Err arm, wrapping it into Rc<ExecutionError::TraceError>.
#[macro_export]
macro_rules! trace_to_exec_err {
    ($trace_expr: expr) => {
        $trace_expr.map_err(|e| std::rc::Rc::new(crate::execution_step::ExecutionError::TraceError(e)))
    };
}
impl ExecutionError {
    /// Returns the numeric code of this error: the zero-based position of
    /// its variant in the enum declaration order (via the `EnumIter`
    /// discriminants), so variant order defines the wire-visible codes.
    pub(crate) fn to_error_code(&self) -> u32 {
        let mut errors = ExecutionErrorDiscriminants::iter();
        let actual_error_type = ExecutionErrorDiscriminants::from(self);
        // unwrap is safe here because errors are guaranteed to contain all errors variants
        errors.position(|et| et == actual_error_type).unwrap() as _
    }
}
// Info-level logging routed to the dedicated join-behaviour log target.
macro_rules! log_join {
    ($($args:tt)*) => {
        log::info!(target: air_log_targets::JOIN_BEHAVIOUR, $($args)*)
    }
}
// "Joinable" errors are those whose missing value may still arrive later;
// each arm logs what is being waited on.
#[rustfmt::skip::macros(log_join)]
impl Joinable for ExecutionError {
    /// Returns true, if supplied error is related to variable not found errors type.
    /// Print log if this is joinable error type.
    fn is_joinable(&self) -> bool {
        use ExecutionError::*;
        match self {
            VariableNotFound(var_name) => {
                log_join!("  waiting for an argument with name '{}'", var_name);
                true
            }
            LambdaApplierError(LambdaError::StreamNotHaveEnoughValues { stream_size, idx }) => {
                log_join!("  waiting for an argument with idx '{}' on stream with size '{}'", idx, stream_size);
                true
            }
            EmptyStreamLambdaError => {
                log_join!("  waiting on empty stream for path ");
                true
            }
            _ => false,
        }
    }
}
impl Catchable for ExecutionError {
    fn is_catchable(&self) -> bool {
        // Trace errors indicate invalid data and must be treated as
        // non-catchable; everything else may be caught.
        match self {
            ExecutionError::TraceError(_) => false,
            _ => true,
        }
    }
}
impl From<std::convert::Infallible> for ExecutionError {
    fn from(never: std::convert::Infallible) -> Self {
        // `Infallible` has no values, so this match is statically empty.
        match never {}
    }
}
| 36.582418 | 126 | 0.686242 |
f5dc6058a4456efd91f35d634ed93fa347ccccb8 | 214,181 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
// ---- AutoRust-generated wire models; `serde(rename)` attrs pin the exact
// JSON field names, so do not hand-rename fields. ----
/// SAS uri granting access (wire name `accessSAS`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AccessUri {
    #[serde(rename = "accessSAS", default, skip_serializing_if = "Option::is_none")]
    pub access_sas: Option<String>,
}
/// Additional capability flags; only `ultraSSDEnabled` is modeled here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AdditionalCapabilities {
    #[serde(rename = "ultraSSDEnabled", default, skip_serializing_if = "Option::is_none")]
    pub ultra_ssd_enabled: Option<bool>,
}
/// Extra unattended-setup content keyed by pass/component/setting name
/// (the enum values suggest Windows unattend.xml — see the module below).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AdditionalUnattendContent {
    #[serde(rename = "passName", default, skip_serializing_if = "Option::is_none")]
    pub pass_name: Option<additional_unattend_content::PassName>,
    #[serde(rename = "componentName", default, skip_serializing_if = "Option::is_none")]
    pub component_name: Option<additional_unattend_content::ComponentName>,
    #[serde(rename = "settingName", default, skip_serializing_if = "Option::is_none")]
    pub setting_name: Option<additional_unattend_content::SettingName>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub content: Option<String>,
}
/// Closed enums for `AdditionalUnattendContent`.
pub mod additional_unattend_content {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PassName {
        OobeSystem,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ComponentName {
        #[serde(rename = "Microsoft-Windows-Shell-Setup")]
        MicrosoftWindowsShellSetup,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SettingName {
        AutoLogon,
        FirstLogonCommands,
    }
}
/// Reference to another entity by resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiEntityReference {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Service error body; `details` and `innererror` carry nested diagnostics.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiError {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ApiErrorBase>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub innererror: Option<InnerError>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Minimal code/target/message triple used inside `ApiError::details`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiErrorBase {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Automatic OS upgrade switches (wire names `enableAutomaticOSUpgrade`,
/// `disableAutomaticRollback`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AutomaticOsUpgradePolicy {
    #[serde(rename = "enableAutomaticOSUpgrade", default, skip_serializing_if = "Option::is_none")]
    pub enable_automatic_os_upgrade: Option<bool>,
    #[serde(rename = "disableAutomaticRollback", default, skip_serializing_if = "Option::is_none")]
    pub disable_automatic_rollback: Option<bool>,
}
/// Whether automatic OS upgrade is supported; required on the wire
/// (note: not an `Option`, so deserialization fails if absent).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AutomaticOsUpgradeProperties {
    #[serde(rename = "automaticOSUpgradeSupported")]
    pub automatic_os_upgrade_supported: bool,
}
/// Automatic repairs configuration; `grace_period` is a string on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AutomaticRepairsPolicy {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
    #[serde(rename = "gracePeriod", default, skip_serializing_if = "Option::is_none")]
    pub grace_period: Option<String>,
}
/// Availability set: the common `Resource` envelope is flattened into the
/// same JSON object as the type-specific `properties` and `sku`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailabilitySet {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AvailabilitySetProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
}
/// One page of availability sets; `nextLink` points at the following page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailabilitySetListResult {
    pub value: Vec<AvailabilitySet>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Availability set payload: domain counts, member VMs, placement group
/// reference, and instance-view statuses.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailabilitySetProperties {
    #[serde(rename = "platformUpdateDomainCount", default, skip_serializing_if = "Option::is_none")]
    pub platform_update_domain_count: Option<i32>,
    #[serde(rename = "platformFaultDomainCount", default, skip_serializing_if = "Option::is_none")]
    pub platform_fault_domain_count: Option<i32>,
    #[serde(rename = "virtualMachines", default, skip_serializing_if = "Vec::is_empty")]
    pub virtual_machines: Vec<SubResource>,
    #[serde(rename = "proximityPlacementGroup", default, skip_serializing_if = "Option::is_none")]
    pub proximity_placement_group: Option<SubResource>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<InstanceViewStatus>,
}
/// Sku type of an availability set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum AvailabilitySetSkuType {
    Classic,
    Aligned,
}
/// PATCH-style update payload: `UpdateResource` envelope (flattened) plus
/// the mutable properties and sku.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailabilitySetUpdate {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AvailabilitySetProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
}
/// Result summary of a patch-assessment run; timestamps are strings on the
/// wire, and `error` carries the failure details if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailablePatchSummary {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<available_patch_summary::Status>,
    #[serde(rename = "assessmentActivityId", default, skip_serializing_if = "Option::is_none")]
    pub assessment_activity_id: Option<String>,
    #[serde(rename = "rebootPending", default, skip_serializing_if = "Option::is_none")]
    pub reboot_pending: Option<bool>,
    #[serde(rename = "criticalAndSecurityPatchCount", default, skip_serializing_if = "Option::is_none")]
    pub critical_and_security_patch_count: Option<i32>,
    #[serde(rename = "otherPatchCount", default, skip_serializing_if = "Option::is_none")]
    pub other_patch_count: Option<i32>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "lastModifiedTime", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ApiError>,
}
/// Status values for `AvailablePatchSummary`.
pub mod available_patch_summary {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        InProgress,
        Failed,
        Succeeded,
        CompletedWithWarnings,
    }
}
/// Billing details; `max_price` is an `f64`, so this struct cannot derive `Eq`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BillingProfile {
    #[serde(rename = "maxPrice", default, skip_serializing_if = "Option::is_none")]
    pub max_price: Option<f64>,
}
/// Boot-diagnostics settings: an enable flag and an optional storage URI.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BootDiagnostics {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
    #[serde(rename = "storageUri", default, skip_serializing_if = "Option::is_none")]
    pub storage_uri: Option<String>,
}
/// Runtime (instance-view) boot-diagnostics data: blob URIs plus a status.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BootDiagnosticsInstanceView {
    #[serde(rename = "consoleScreenshotBlobUri", default, skip_serializing_if = "Option::is_none")]
    pub console_screenshot_blob_uri: Option<String>,
    #[serde(rename = "serialConsoleLogBlobUri", default, skip_serializing_if = "Option::is_none")]
    pub serial_console_log_blob_uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<InstanceViewStatus>,
}
/// Disk caching mode; serialized as the bare variant name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Caching {
    None,
    ReadOnly,
    ReadWrite,
}
/// Error envelope wrapping an optional [`ApiError`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ApiError>,
}
/// A cloud-service resource. `location` is the only required field; `type`
/// is renamed to `type_` because `type` is a Rust keyword.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudService {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CloudServiceProperties>,
}
/// Extension profile: the list of extensions applied to the cloud service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceExtensionProfile {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub extensions: Vec<Extension>,
}
/// Properties of a single cloud-service extension (publisher, type/version,
/// settings, roll-out targets). `settings`/`protected_settings` are raw strings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceExtensionProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "typeHandlerVersion", default, skip_serializing_if = "Option::is_none")]
    pub type_handler_version: Option<String>,
    #[serde(rename = "autoUpgradeMinorVersion", default, skip_serializing_if = "Option::is_none")]
    pub auto_upgrade_minor_version: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub settings: Option<String>,
    #[serde(rename = "protectedSettings", default, skip_serializing_if = "Option::is_none")]
    pub protected_settings: Option<String>,
    #[serde(rename = "protectedSettingsFromKeyVault", default, skip_serializing_if = "Option::is_none")]
    pub protected_settings_from_key_vault: Option<CloudServiceVaultAndSecretReference>,
    #[serde(rename = "forceUpdateTag", default, skip_serializing_if = "Option::is_none")]
    pub force_update_tag: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "rolesAppliedTo", default, skip_serializing_if = "Vec::is_empty")]
    pub roles_applied_to: Vec<String>,
}
/// Instance view (runtime status) of a cloud service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceInstanceView {
    #[serde(rename = "roleInstance", default, skip_serializing_if = "Option::is_none")]
    pub role_instance: Option<InstanceViewStatusesSummary>,
    #[serde(rename = "sdkVersion", default, skip_serializing_if = "Option::is_none")]
    pub sdk_version: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<ResourceInstanceViewStatus>,
}
/// One page of cloud services; `next_link` points at the following page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceListResult {
    pub value: Vec<CloudService>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Network profile: load-balancer configurations and an optional swap target.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceNetworkProfile {
    #[serde(rename = "loadBalancerConfigurations", default, skip_serializing_if = "Vec::is_empty")]
    pub load_balancer_configurations: Vec<LoadBalancerConfiguration>,
    #[serde(rename = "swappableCloudService", default, skip_serializing_if = "Option::is_none")]
    pub swappable_cloud_service: Option<SubResource>,
}
/// OS profile: vault secret groups for the cloud service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceOsProfile {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub secrets: Vec<CloudServiceVaultSecretGroup>,
}
/// Full property bag for a cloud service: package/configuration sources,
/// upgrade mode, and the role/OS/network/extension sub-profiles.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceProperties {
    #[serde(rename = "packageUrl", default, skip_serializing_if = "Option::is_none")]
    pub package_url: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub configuration: Option<String>,
    #[serde(rename = "configurationUrl", default, skip_serializing_if = "Option::is_none")]
    pub configuration_url: Option<String>,
    #[serde(rename = "startCloudService", default, skip_serializing_if = "Option::is_none")]
    pub start_cloud_service: Option<bool>,
    #[serde(rename = "upgradeMode", default, skip_serializing_if = "Option::is_none")]
    pub upgrade_mode: Option<CloudServiceUpgradeMode>,
    #[serde(rename = "roleProfile", default, skip_serializing_if = "Option::is_none")]
    pub role_profile: Option<CloudServiceRoleProfile>,
    #[serde(rename = "osProfile", default, skip_serializing_if = "Option::is_none")]
    pub os_profile: Option<CloudServiceOsProfile>,
    #[serde(rename = "networkProfile", default, skip_serializing_if = "Option::is_none")]
    pub network_profile: Option<CloudServiceNetworkProfile>,
    #[serde(rename = "extensionProfile", default, skip_serializing_if = "Option::is_none")]
    pub extension_profile: Option<CloudServiceExtensionProfile>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "uniqueId", default, skip_serializing_if = "Option::is_none")]
    pub unique_id: Option<String>,
}
/// A role within a cloud service; all fields are server-populated and optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceRole {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<CloudServiceRoleSku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CloudServiceRoleProperties>,
}
/// One page of cloud-service roles with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceRoleListResult {
    pub value: Vec<CloudServiceRole>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Role profile: the list of role name/SKU pairs for the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceRoleProfile {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub roles: Vec<CloudServiceRoleProfileProperties>,
}
/// A single role-profile entry: role name plus its SKU.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceRoleProfileProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<CloudServiceRoleSku>,
}
/// Read-only role properties (currently just a unique id).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceRoleProperties {
    #[serde(rename = "uniqueId", default, skip_serializing_if = "Option::is_none")]
    pub unique_id: Option<String>,
}
/// SKU of a cloud-service role: name, tier, and instance capacity.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceRoleSku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub capacity: Option<i64>,
}
/// Update (PATCH) payload for a cloud service: tags only.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Upgrade mode of a cloud service; serialized as the bare variant name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum CloudServiceUpgradeMode {
    Auto,
    Manual,
    Simultaneous,
}
/// Reference to a secret stored in a key vault (vault + secret URL).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceVaultAndSecretReference {
    #[serde(rename = "sourceVault", default, skip_serializing_if = "Option::is_none")]
    pub source_vault: Option<SubResource>,
    #[serde(rename = "secretUrl", default, skip_serializing_if = "Option::is_none")]
    pub secret_url: Option<String>,
}
/// Reference to a certificate by its vault URL.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceVaultCertificate {
    #[serde(rename = "certificateUrl", default, skip_serializing_if = "Option::is_none")]
    pub certificate_url: Option<String>,
}
/// A source vault together with the certificates taken from it.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServiceVaultSecretGroup {
    #[serde(rename = "sourceVault", default, skip_serializing_if = "Option::is_none")]
    pub source_vault: Option<SubResource>,
    #[serde(rename = "vaultCertificates", default, skip_serializing_if = "Vec::is_empty")]
    pub vault_certificates: Vec<CloudServiceVaultCertificate>,
}
/// List of compute API operations (no pagination link in this shape).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ComputeOperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ComputeOperationValue>,
}
/// One compute API operation: origin, name, and display metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ComputeOperationValue {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<ComputeOperationValueDisplay>,
}
/// Human-readable display strings for a compute API operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ComputeOperationValueDisplay {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
}
/// A container-service resource: the common tracked-resource fields plus
/// optional container-service properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerService {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ContainerServiceProperties>,
}
/// An agent-pool profile. `name`, `count`, `vm_size`, and `dns_prefix` are
/// required on the wire; `fqdn` is read-only/optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceAgentPoolProfile {
    pub name: String,
    pub count: i32,
    #[serde(rename = "vmSize")]
    pub vm_size: container_service_agent_pool_profile::VmSize,
    #[serde(rename = "dnsPrefix")]
    pub dns_prefix: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub fqdn: Option<String>,
}
/// Enumerations scoped to [`ContainerServiceAgentPoolProfile`].
pub mod container_service_agent_pool_profile {
    use super::*;
    /// Allowed agent VM sizes; each variant is renamed to its exact
    /// `Standard_*` wire value (underscored), hence the explicit renames.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum VmSize {
        #[serde(rename = "Standard_A0")]
        StandardA0,
        #[serde(rename = "Standard_A1")]
        StandardA1,
        #[serde(rename = "Standard_A2")]
        StandardA2,
        #[serde(rename = "Standard_A3")]
        StandardA3,
        #[serde(rename = "Standard_A4")]
        StandardA4,
        #[serde(rename = "Standard_A5")]
        StandardA5,
        #[serde(rename = "Standard_A6")]
        StandardA6,
        #[serde(rename = "Standard_A7")]
        StandardA7,
        #[serde(rename = "Standard_A8")]
        StandardA8,
        #[serde(rename = "Standard_A9")]
        StandardA9,
        #[serde(rename = "Standard_A10")]
        StandardA10,
        #[serde(rename = "Standard_A11")]
        StandardA11,
        #[serde(rename = "Standard_D1")]
        StandardD1,
        #[serde(rename = "Standard_D2")]
        StandardD2,
        #[serde(rename = "Standard_D3")]
        StandardD3,
        #[serde(rename = "Standard_D4")]
        StandardD4,
        #[serde(rename = "Standard_D11")]
        StandardD11,
        #[serde(rename = "Standard_D12")]
        StandardD12,
        #[serde(rename = "Standard_D13")]
        StandardD13,
        #[serde(rename = "Standard_D14")]
        StandardD14,
        #[serde(rename = "Standard_D1_v2")]
        StandardD1V2,
        #[serde(rename = "Standard_D2_v2")]
        StandardD2V2,
        #[serde(rename = "Standard_D3_v2")]
        StandardD3V2,
        #[serde(rename = "Standard_D4_v2")]
        StandardD4V2,
        #[serde(rename = "Standard_D5_v2")]
        StandardD5V2,
        #[serde(rename = "Standard_D11_v2")]
        StandardD11V2,
        #[serde(rename = "Standard_D12_v2")]
        StandardD12V2,
        #[serde(rename = "Standard_D13_v2")]
        StandardD13V2,
        #[serde(rename = "Standard_D14_v2")]
        StandardD14V2,
        #[serde(rename = "Standard_G1")]
        StandardG1,
        #[serde(rename = "Standard_G2")]
        StandardG2,
        #[serde(rename = "Standard_G3")]
        StandardG3,
        #[serde(rename = "Standard_G4")]
        StandardG4,
        #[serde(rename = "Standard_G5")]
        StandardG5,
        #[serde(rename = "Standard_DS1")]
        StandardDs1,
        #[serde(rename = "Standard_DS2")]
        StandardDs2,
        #[serde(rename = "Standard_DS3")]
        StandardDs3,
        #[serde(rename = "Standard_DS4")]
        StandardDs4,
        #[serde(rename = "Standard_DS11")]
        StandardDs11,
        #[serde(rename = "Standard_DS12")]
        StandardDs12,
        #[serde(rename = "Standard_DS13")]
        StandardDs13,
        #[serde(rename = "Standard_DS14")]
        StandardDs14,
        #[serde(rename = "Standard_GS1")]
        StandardGs1,
        #[serde(rename = "Standard_GS2")]
        StandardGs2,
        #[serde(rename = "Standard_GS3")]
        StandardGs3,
        #[serde(rename = "Standard_GS4")]
        StandardGs4,
        #[serde(rename = "Standard_GS5")]
        StandardGs5,
    }
}
/// Custom-orchestrator profile; the orchestrator name is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceCustomProfile {
    pub orchestrator: String,
}
/// Diagnostics profile; VM diagnostics settings are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceDiagnosticsProfile {
    #[serde(rename = "vmDiagnostics")]
    pub vm_diagnostics: ContainerServiceVmDiagnostics,
}
/// Linux OS profile: admin user name plus required SSH configuration.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceLinuxProfile {
    #[serde(rename = "adminUsername")]
    pub admin_username: String,
    pub ssh: ContainerServiceSshConfiguration,
}
/// One page of container services with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ContainerService>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Master-node profile; `dns_prefix` is required.
/// NOTE(review): the generated `Count` enum below is empty, so a present
/// `count` value cannot deserialize — appears to be a generator artifact.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceMasterProfile {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub count: Option<container_service_master_profile::Count>,
    #[serde(rename = "dnsPrefix")]
    pub dns_prefix: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub fqdn: Option<String>,
}
/// Enumerations scoped to [`ContainerServiceMasterProfile`].
pub mod container_service_master_profile {
    use super::*;
    /// Empty enum emitted by the code generator (no variants declared).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Count {}
}
/// Orchestrator profile; the orchestrator type is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceOrchestratorProfile {
    #[serde(rename = "orchestratorType")]
    pub orchestrator_type: container_service_orchestrator_profile::OrchestratorType,
}
/// Enumerations scoped to [`ContainerServiceOrchestratorProfile`].
pub mod container_service_orchestrator_profile {
    use super::*;
    /// Supported orchestrators; `DCOS` needs a rename, the rest serialize
    /// as their variant names.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OrchestratorType {
        Swarm,
        #[serde(rename = "DCOS")]
        Dcos,
        Custom,
        Kubernetes,
    }
}
/// Full property bag for a container service. `master_profile`,
/// `agent_pool_profiles`, and `linux_profile` are required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "orchestratorProfile", default, skip_serializing_if = "Option::is_none")]
    pub orchestrator_profile: Option<ContainerServiceOrchestratorProfile>,
    #[serde(rename = "customProfile", default, skip_serializing_if = "Option::is_none")]
    pub custom_profile: Option<ContainerServiceCustomProfile>,
    #[serde(rename = "servicePrincipalProfile", default, skip_serializing_if = "Option::is_none")]
    pub service_principal_profile: Option<ContainerServiceServicePrincipalProfile>,
    #[serde(rename = "masterProfile")]
    pub master_profile: ContainerServiceMasterProfile,
    #[serde(rename = "agentPoolProfiles")]
    pub agent_pool_profiles: Vec<ContainerServiceAgentPoolProfile>,
    #[serde(rename = "windowsProfile", default, skip_serializing_if = "Option::is_none")]
    pub windows_profile: Option<ContainerServiceWindowsProfile>,
    #[serde(rename = "linuxProfile")]
    pub linux_profile: ContainerServiceLinuxProfile,
    #[serde(rename = "diagnosticsProfile", default, skip_serializing_if = "Option::is_none")]
    pub diagnostics_profile: Option<ContainerServiceDiagnosticsProfile>,
}
/// Service-principal credentials; both fields are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceServicePrincipalProfile {
    #[serde(rename = "clientId")]
    pub client_id: String,
    pub secret: String,
}
/// SSH configuration: the required list of public keys.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceSshConfiguration {
    #[serde(rename = "publicKeys")]
    pub public_keys: Vec<ContainerServiceSshPublicKey>,
}
/// A single SSH public key (required key data string).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceSshPublicKey {
    #[serde(rename = "keyData")]
    pub key_data: String,
}
/// VM diagnostics settings: required enable flag, optional storage URI.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceVmDiagnostics {
    pub enabled: bool,
    #[serde(rename = "storageUri", default, skip_serializing_if = "Option::is_none")]
    pub storage_uri: Option<String>,
}
/// Windows OS profile; both credentials are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerServiceWindowsProfile {
    #[serde(rename = "adminUsername")]
    pub admin_username: String,
    #[serde(rename = "adminPassword")]
    pub admin_password: String,
}
/// Disk create option used by [`DataDisk`]; note the separate, larger
/// `creation_data::CreateOption` enum below for managed-disk creation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum CreateOption {
    FromImage,
    Empty,
    Attach,
}
/// Source data for creating a managed disk. Only `create_option` is
/// required; the remaining fields qualify the chosen option (image,
/// source URI/resource, upload size, sector size).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CreationData {
    #[serde(rename = "createOption")]
    pub create_option: creation_data::CreateOption,
    #[serde(rename = "storageAccountId", default, skip_serializing_if = "Option::is_none")]
    pub storage_account_id: Option<String>,
    #[serde(rename = "imageReference", default, skip_serializing_if = "Option::is_none")]
    pub image_reference: Option<ImageDiskReference>,
    #[serde(rename = "galleryImageReference", default, skip_serializing_if = "Option::is_none")]
    pub gallery_image_reference: Option<ImageDiskReference>,
    #[serde(rename = "sourceUri", default, skip_serializing_if = "Option::is_none")]
    pub source_uri: Option<String>,
    #[serde(rename = "sourceResourceId", default, skip_serializing_if = "Option::is_none")]
    pub source_resource_id: Option<String>,
    #[serde(rename = "sourceUniqueId", default, skip_serializing_if = "Option::is_none")]
    pub source_unique_id: Option<String>,
    #[serde(rename = "uploadSizeBytes", default, skip_serializing_if = "Option::is_none")]
    pub upload_size_bytes: Option<i64>,
    #[serde(rename = "logicalSectorSize", default, skip_serializing_if = "Option::is_none")]
    pub logical_sector_size: Option<i32>,
}
/// Enumerations scoped to [`CreationData`].
pub mod creation_data {
    use super::*;
    /// How the disk is sourced; serialized as the bare variant name.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CreateOption {
        Empty,
        Attach,
        FromImage,
        Import,
        Copy,
        Restore,
        Upload,
    }
}
/// A VM data disk. `lun` and `create_option` are required; the rest are
/// optional attachment/performance settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataDisk {
    pub lun: i32,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub vhd: Option<VirtualHardDisk>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub image: Option<VirtualHardDisk>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub caching: Option<Caching>,
    #[serde(rename = "writeAcceleratorEnabled", default, skip_serializing_if = "Option::is_none")]
    pub write_accelerator_enabled: Option<bool>,
    #[serde(rename = "createOption")]
    pub create_option: CreateOption,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "managedDisk", default, skip_serializing_if = "Option::is_none")]
    pub managed_disk: Option<ManagedDiskParameters>,
    #[serde(rename = "toBeDetached", default, skip_serializing_if = "Option::is_none")]
    pub to_be_detached: Option<bool>,
    #[serde(rename = "diskIOPSReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_iops_read_write: Option<i64>,
    #[serde(rename = "diskMBpsReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_m_bps_read_write: Option<i64>,
}
/// Data-disk entry in an image, identified by LUN only.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataDiskImage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub lun: Option<i32>,
}
/// Per-LUN encryption settings for an image data disk; flattens the
/// common [`DiskImageEncryption`] fields and requires the LUN.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DataDiskImageEncryption {
    #[serde(flatten)]
    pub disk_image_encryption: DiskImageEncryption,
    pub lun: i32,
}
/// A dedicated-host resource; the `sku` is required, `properties` optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHost {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DedicatedHostProperties>,
    pub sku: Sku,
}
/// A VM size and how many of it can still be allocated on a host.
/// NOTE(review): `count` is an `f64` in the generated model — confirm the
/// wire type with the service spec before assuming fractional counts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostAllocatableVm {
    #[serde(rename = "vmSize", default, skip_serializing_if = "Option::is_none")]
    pub vm_size: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub count: Option<f64>,
}
/// Remaining capacity of a dedicated host, per VM size.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostAvailableCapacity {
    #[serde(rename = "allocatableVMs", default, skip_serializing_if = "Vec::is_empty")]
    pub allocatable_v_ms: Vec<DedicatedHostAllocatableVm>,
}
/// A dedicated-host group resource with optional availability zones.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostGroup {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DedicatedHostGroupProperties>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
}
/// Instance view of a host group: the per-host instance views.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostGroupInstanceView {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub hosts: Vec<DedicatedHostInstanceViewWithName>,
}
/// One page of dedicated-host groups with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostGroupListResult {
    pub value: Vec<DedicatedHostGroup>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Host-group properties; the fault-domain count is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostGroupProperties {
    #[serde(rename = "platformFaultDomainCount")]
    pub platform_fault_domain_count: i32,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub hosts: Vec<SubResourceReadOnly>,
    #[serde(rename = "instanceView", default, skip_serializing_if = "Option::is_none")]
    pub instance_view: Option<DedicatedHostGroupInstanceView>,
    #[serde(rename = "supportAutomaticPlacement", default, skip_serializing_if = "Option::is_none")]
    pub support_automatic_placement: Option<bool>,
}
/// Update (PATCH) payload for a dedicated-host group.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostGroupUpdate {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DedicatedHostGroupProperties>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
}
/// Instance view of a dedicated host: asset id, capacity, and statuses.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostInstanceView {
    #[serde(rename = "assetId", default, skip_serializing_if = "Option::is_none")]
    pub asset_id: Option<String>,
    #[serde(rename = "availableCapacity", default, skip_serializing_if = "Option::is_none")]
    pub available_capacity: Option<DedicatedHostAvailableCapacity>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<InstanceViewStatus>,
}
/// A host instance view annotated with the host's name (used by the group view).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostInstanceViewWithName {
    #[serde(flatten)]
    pub dedicated_host_instance_view: DedicatedHostInstanceView,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// License type for a dedicated host; renamed variants carry the exact
/// underscored wire values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DedicatedHostLicenseType {
    None,
    #[serde(rename = "Windows_Server_Hybrid")]
    WindowsServerHybrid,
    #[serde(rename = "Windows_Server_Perpetual")]
    WindowsServerPerpetual,
}
/// One page of dedicated hosts with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostListResult {
    pub value: Vec<DedicatedHost>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Properties of a dedicated host; all fields optional, several
/// (host id, VM list, provisioning data, instance view) server-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostProperties {
    #[serde(rename = "platformFaultDomain", default, skip_serializing_if = "Option::is_none")]
    pub platform_fault_domain: Option<i32>,
    #[serde(rename = "autoReplaceOnFailure", default, skip_serializing_if = "Option::is_none")]
    pub auto_replace_on_failure: Option<bool>,
    #[serde(rename = "hostId", default, skip_serializing_if = "Option::is_none")]
    pub host_id: Option<String>,
    #[serde(rename = "virtualMachines", default, skip_serializing_if = "Vec::is_empty")]
    pub virtual_machines: Vec<SubResourceReadOnly>,
    #[serde(rename = "licenseType", default, skip_serializing_if = "Option::is_none")]
    pub license_type: Option<DedicatedHostLicenseType>,
    #[serde(rename = "provisioningTime", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_time: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "instanceView", default, skip_serializing_if = "Option::is_none")]
    pub instance_view: Option<DedicatedHostInstanceView>,
}
/// Update (PATCH) payload for a dedicated host.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHostUpdate {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DedicatedHostProperties>,
}
/// VM diagnostics profile; currently just boot diagnostics.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiagnosticsProfile {
    #[serde(rename = "bootDiagnostics", default, skip_serializing_if = "Option::is_none")]
    pub boot_diagnostics: Option<BootDiagnostics>,
}
/// Ephemeral OS disk option; the single allowed value is `Local`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DiffDiskOption {
    Local,
}
/// Placement of an ephemeral OS disk; serialized as the variant name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DiffDiskPlacement {
    CacheDisk,
    ResourceDisk,
}
/// Ephemeral OS disk settings (option + placement).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiffDiskSettings {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub option: Option<DiffDiskOption>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub placement: Option<DiffDiskPlacement>,
}
/// Disallowed disk types for an image.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Disallowed {
    #[serde(rename = "diskTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub disk_types: Vec<String>,
}
/// Configuration not allowed for a VM image (disk type restriction).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DisallowedConfiguration {
    #[serde(rename = "vmDiskType", default, skip_serializing_if = "Option::is_none")]
    pub vm_disk_type: Option<disallowed_configuration::VmDiskType>,
}
/// Enumerations scoped to [`DisallowedConfiguration`].
pub mod disallowed_configuration {
    use super::*;
    /// Disk type being disallowed; serialized as the variant name.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum VmDiskType {
        None,
        Unmanaged,
    }
}
/// A managed-disk resource: tracked-resource fields plus SKU, zones,
/// extended location, ownership links, and the disk property bag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Disk {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(rename = "managedBy", default, skip_serializing_if = "Option::is_none")]
    pub managed_by: Option<String>,
    #[serde(rename = "managedByExtended", default, skip_serializing_if = "Vec::is_empty")]
    pub managed_by_extended: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<DiskSku>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
    #[serde(rename = "extendedLocation", default, skip_serializing_if = "Option::is_none")]
    pub extended_location: Option<ExtendedLocation>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DiskProperties>,
}
/// A disk-access resource (private-endpoint access to disks).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskAccess {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DiskAccessProperties>,
}
/// One page of disk-access resources with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskAccessList {
    pub value: Vec<DiskAccess>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Disk-access properties: private-endpoint connections plus read-only
/// provisioning state and creation time.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskAccessProperties {
    #[serde(rename = "privateEndpointConnections", default, skip_serializing_if = "Vec::is_empty")]
    pub private_endpoint_connections: Vec<PrivateEndpointConnection>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "timeCreated", default, skip_serializing_if = "Option::is_none")]
    pub time_created: Option<String>,
}
/// Update (PATCH) payload for a disk-access resource: tags only.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskAccessUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// A disk-encryption-set resource with its managed identity.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSet {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<EncryptionSetIdentity>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<EncryptionSetProperties>,
}
/// One page of disk-encryption sets with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSetList {
    pub value: Vec<DiskEncryptionSet>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Reference to a disk-encryption set (just the flattened sub-resource id).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSetParameters {
    #[serde(flatten)]
    pub sub_resource: SubResource,
}
/// Encryption-at-rest key ownership model for an encryption set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DiskEncryptionSetType {
    EncryptionAtRestWithCustomerKey,
    EncryptionAtRestWithPlatformAndCustomerKeys,
}
/// Update (PATCH) payload for a disk-encryption set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSetUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DiskEncryptionSetUpdateProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Updatable encryption-set properties: encryption type and active key.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSetUpdateProperties {
    #[serde(rename = "encryptionType", default, skip_serializing_if = "Option::is_none")]
    pub encryption_type: Option<DiskEncryptionSetType>,
    #[serde(rename = "activeKey", default, skip_serializing_if = "Option::is_none")]
    pub active_key: Option<KeyForDiskEncryptionSet>,
}
/// Azure Disk Encryption settings: DEK secret, optional KEK, enable flag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskEncryptionSettings {
    #[serde(rename = "diskEncryptionKey", default, skip_serializing_if = "Option::is_none")]
    pub disk_encryption_key: Option<KeyVaultSecretReference>,
    #[serde(rename = "keyEncryptionKey", default, skip_serializing_if = "Option::is_none")]
    pub key_encryption_key: Option<KeyVaultKeyReference>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
}
/// Encryption settings for an image disk: the encryption-set resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskImageEncryption {
    #[serde(rename = "diskEncryptionSetId", default, skip_serializing_if = "Option::is_none")]
    pub disk_encryption_set_id: Option<String>,
}
/// Instance view of a disk: name, encryption settings, and statuses.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskInstanceView {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "encryptionSettings", default, skip_serializing_if = "Vec::is_empty")]
    pub encryption_settings: Vec<DiskEncryptionSettings>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<InstanceViewStatus>,
}
/// One page of disks with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskList {
    pub value: Vec<Disk>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Full property bag for a managed disk. Only `creation_data` is required;
/// size/performance fields, sharing, encryption, and network-access policy
/// are optional, and several are server-populated (time created, unique id,
/// disk state, provisioning state, share info).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskProperties {
    #[serde(rename = "timeCreated", default, skip_serializing_if = "Option::is_none")]
    pub time_created: Option<String>,
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<disk_properties::OsType>,
    #[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
    pub hyper_v_generation: Option<disk_properties::HyperVGeneration>,
    #[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
    pub purchase_plan: Option<PurchasePlan>,
    #[serde(rename = "creationData")]
    pub creation_data: CreationData,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "diskSizeBytes", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_bytes: Option<i64>,
    #[serde(rename = "uniqueId", default, skip_serializing_if = "Option::is_none")]
    pub unique_id: Option<String>,
    #[serde(rename = "encryptionSettingsCollection", default, skip_serializing_if = "Option::is_none")]
    pub encryption_settings_collection: Option<EncryptionSettingsCollection>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "diskIOPSReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_iops_read_write: Option<i64>,
    #[serde(rename = "diskMBpsReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_m_bps_read_write: Option<i64>,
    #[serde(rename = "diskIOPSReadOnly", default, skip_serializing_if = "Option::is_none")]
    pub disk_iops_read_only: Option<i64>,
    #[serde(rename = "diskMBpsReadOnly", default, skip_serializing_if = "Option::is_none")]
    pub disk_m_bps_read_only: Option<i64>,
    #[serde(rename = "diskState", default, skip_serializing_if = "Option::is_none")]
    pub disk_state: Option<DiskState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<Encryption>,
    #[serde(rename = "maxShares", default, skip_serializing_if = "Option::is_none")]
    pub max_shares: Option<i32>,
    #[serde(rename = "shareInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub share_info: Vec<ShareInfoElement>,
    #[serde(rename = "networkAccessPolicy", default, skip_serializing_if = "Option::is_none")]
    pub network_access_policy: Option<NetworkAccessPolicy>,
    #[serde(rename = "diskAccessId", default, skip_serializing_if = "Option::is_none")]
    pub disk_access_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
    #[serde(rename = "burstingEnabled", default, skip_serializing_if = "Option::is_none")]
    pub bursting_enabled: Option<bool>,
}
/// Enumerations scoped to [`DiskProperties`].
pub mod disk_properties {
    use super::*;
    /// Operating-system type of the disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
    /// Hyper-V generation of the disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HyperVGeneration {
        V1,
        V2,
    }
}
/// A disk restore point: proxy-resource fields plus optional properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskRestorePoint {
    #[serde(flatten)]
    pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DiskRestorePointProperties>,
}
/// One page of disk restore points with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskRestorePointList {
    pub value: Vec<DiskRestorePoint>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Read-only properties of a disk restore point: creation time, source
/// disk identity, OS/generation metadata, purchase plan, and encryption.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskRestorePointProperties {
    #[serde(rename = "timeCreated", default, skip_serializing_if = "Option::is_none")]
    pub time_created: Option<String>,
    #[serde(rename = "sourceResourceId", default, skip_serializing_if = "Option::is_none")]
    pub source_resource_id: Option<String>,
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<disk_restore_point_properties::OsType>,
    #[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
    pub hyper_v_generation: Option<disk_restore_point_properties::HyperVGeneration>,
    #[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
    pub purchase_plan: Option<PurchasePlan>,
    #[serde(rename = "familyId", default, skip_serializing_if = "Option::is_none")]
    pub family_id: Option<String>,
    #[serde(rename = "sourceUniqueId", default, skip_serializing_if = "Option::is_none")]
    pub source_unique_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<Encryption>,
}
pub mod disk_restore_point_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OsType {
Windows,
Linux,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum HyperVGeneration {
V1,
V2,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskSku {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<disk_sku::Name>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tier: Option<String>,
}
pub mod disk_sku {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Name {
#[serde(rename = "Standard_LRS")]
StandardLrs,
#[serde(rename = "Premium_LRS")]
PremiumLrs,
#[serde(rename = "StandardSSD_LRS")]
StandardSsdLrs,
#[serde(rename = "UltraSSD_LRS")]
UltraSsdLrs,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DiskState {
Unattached,
Attached,
Reserved,
#[serde(rename = "ActiveSAS")]
ActiveSas,
ReadyToUpload,
ActiveUpload,
}
/// PATCH body for updating a disk: new properties, tags, and/or SKU.
/// Every field is optional so an update can touch only what it changes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskUpdate {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<DiskUpdateProperties>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub sku: Option<DiskSku>,
}
/// Updatable subset of disk properties (size, encryption, IOPS/throughput
/// caps, sharing, network access, tier, bursting, purchase plan). Fields
/// left as `None` are omitted from the request payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskUpdateProperties {
#[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
pub os_type: Option<disk_update_properties::OsType>,
#[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
pub disk_size_gb: Option<i32>,
#[serde(rename = "encryptionSettingsCollection", default, skip_serializing_if = "Option::is_none")]
pub encryption_settings_collection: Option<EncryptionSettingsCollection>,
#[serde(rename = "diskIOPSReadWrite", default, skip_serializing_if = "Option::is_none")]
pub disk_iops_read_write: Option<i64>,
#[serde(rename = "diskMBpsReadWrite", default, skip_serializing_if = "Option::is_none")]
pub disk_m_bps_read_write: Option<i64>,
#[serde(rename = "diskIOPSReadOnly", default, skip_serializing_if = "Option::is_none")]
pub disk_iops_read_only: Option<i64>,
#[serde(rename = "diskMBpsReadOnly", default, skip_serializing_if = "Option::is_none")]
pub disk_m_bps_read_only: Option<i64>,
#[serde(rename = "maxShares", default, skip_serializing_if = "Option::is_none")]
pub max_shares: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub encryption: Option<Encryption>,
#[serde(rename = "networkAccessPolicy", default, skip_serializing_if = "Option::is_none")]
pub network_access_policy: Option<NetworkAccessPolicy>,
#[serde(rename = "diskAccessId", default, skip_serializing_if = "Option::is_none")]
pub disk_access_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tier: Option<String>,
#[serde(rename = "burstingEnabled", default, skip_serializing_if = "Option::is_none")]
pub bursting_enabled: Option<bool>,
#[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
pub purchase_plan: Option<PurchasePlan>,
}
/// Enum types scoped to [`DiskUpdateProperties`].
pub mod disk_update_properties {
use super::*;
/// Operating system type being set on the disk.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OsType {
Windows,
Linux,
}
}
/// At-rest encryption settings for a disk or snapshot: an optional disk
/// encryption set resource id plus the encryption type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Encryption {
#[serde(rename = "diskEncryptionSetId", default, skip_serializing_if = "Option::is_none")]
pub disk_encryption_set_id: Option<String>,
// `type` is a Rust keyword, hence the trailing-underscore field name.
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<EncryptionType>,
}
/// Encryption settings for an image: one OS disk image plus zero or more
/// data disk images (empty list is omitted from the payload).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionImages {
#[serde(rename = "osDiskImage", default, skip_serializing_if = "Option::is_none")]
pub os_disk_image: Option<OsDiskImageEncryption>,
#[serde(rename = "dataDiskImages", default, skip_serializing_if = "Vec::is_empty")]
pub data_disk_images: Vec<DataDiskImageEncryption>,
}
/// Managed identity attached to a disk encryption set. `principal_id` and
/// `tenant_id` are presumably service-populated (read-only) — confirm.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionSetIdentity {
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<encryption_set_identity::Type>,
#[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
pub principal_id: Option<String>,
#[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
pub tenant_id: Option<String>,
}
/// Enum types scoped to [`EncryptionSetIdentity`].
pub mod encryption_set_identity {
use super::*;
/// Identity kind of the encryption set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Type {
SystemAssigned,
None,
}
}
/// Properties of a disk encryption set: encryption type, the currently
/// active key, previously rotated keys, and provisioning state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionSetProperties {
#[serde(rename = "encryptionType", default, skip_serializing_if = "Option::is_none")]
pub encryption_type: Option<DiskEncryptionSetType>,
#[serde(rename = "activeKey", default, skip_serializing_if = "Option::is_none")]
pub active_key: Option<KeyForDiskEncryptionSet>,
#[serde(rename = "previousKeys", default, skip_serializing_if = "Vec::is_empty")]
pub previous_keys: Vec<KeyForDiskEncryptionSet>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<String>,
}
/// Azure Disk Encryption settings collection. `enabled` is required on the
/// wire; the settings list and version are optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionSettingsCollection {
pub enabled: bool,
#[serde(rename = "encryptionSettings", default, skip_serializing_if = "Vec::is_empty")]
pub encryption_settings: Vec<EncryptionSettingsElement>,
#[serde(rename = "encryptionSettingsVersion", default, skip_serializing_if = "Option::is_none")]
pub encryption_settings_version: Option<String>,
}
/// A single encryption setting: Key Vault references for the disk
/// encryption key (secret) and the key encryption key (key).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionSettingsElement {
#[serde(rename = "diskEncryptionKey", default, skip_serializing_if = "Option::is_none")]
pub disk_encryption_key: Option<KeyVaultAndSecretReference>,
#[serde(rename = "keyEncryptionKey", default, skip_serializing_if = "Option::is_none")]
pub key_encryption_key: Option<KeyVaultAndKeyReference>,
}
/// Kind of key used for at-rest encryption; variant names are the exact
/// wire values (no renames needed).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EncryptionType {
EncryptionAtRestWithPlatformKey,
EncryptionAtRestWithCustomerKey,
EncryptionAtRestWithPlatformAndCustomerKeys,
}
/// Extended location (e.g. an edge zone) of a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExtendedLocation {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<ExtendedLocationType>,
}
/// Kind of extended location; only `EdgeZone` is currently modeled.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ExtendedLocationType {
EdgeZone,
}
/// A named cloud-service extension with its configuration properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Extension {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<CloudServiceExtensionProperties>,
}
/// A Shared Image Gallery resource: tracked `Resource` fields inlined plus
/// gallery-specific properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Gallery {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryProperties>,
}
/// A gallery application definition (a container for application versions).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplication {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryApplicationProperties>,
}
/// List of gallery applications; `next_link` carries the next page URI when
/// the result set is paged.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationList {
pub value: Vec<GalleryApplication>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Descriptive metadata of a gallery application. `supported_os_type`
/// (wire name `supportedOSType`) is the only required field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationProperties {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub eula: Option<String>,
#[serde(rename = "privacyStatementUri", default, skip_serializing_if = "Option::is_none")]
pub privacy_statement_uri: Option<String>,
#[serde(rename = "releaseNoteUri", default, skip_serializing_if = "Option::is_none")]
pub release_note_uri: Option<String>,
#[serde(rename = "endOfLifeDate", default, skip_serializing_if = "Option::is_none")]
pub end_of_life_date: Option<String>,
#[serde(rename = "supportedOSType")]
pub supported_os_type: gallery_application_properties::SupportedOsType,
}
/// Enum types scoped to [`GalleryApplicationProperties`].
pub mod gallery_application_properties {
use super::*;
/// Operating system the application supports.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SupportedOsType {
Windows,
Linux,
}
}
/// PATCH body for a gallery application (updatable resource fields plus
/// the same properties shape used on create).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryApplicationVersionProperties>,
}
/// A version of a gallery application.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersion {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryApplicationVersionProperties>,
}
/// Paged list of gallery application versions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersionList {
pub value: Vec<GalleryApplicationVersion>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Properties of an application version. The publishing profile is required
/// on the wire; provisioning and replication status are service-populated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersionProperties {
#[serde(rename = "publishingProfile")]
pub publishing_profile: GalleryApplicationVersionPublishingProfile,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<gallery_application_version_properties::ProvisioningState>,
#[serde(rename = "replicationStatus", default, skip_serializing_if = "Option::is_none")]
pub replication_status: Option<ReplicationStatus>,
}
/// Enum types scoped to [`GalleryApplicationVersionProperties`].
pub mod gallery_application_version_properties {
use super::*;
/// Lifecycle state of the application version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Updating,
Failed,
Succeeded,
Deleting,
Migrating,
}
}
/// Publishing profile for an application version: the shared publishing
/// base (regions, replica count, dates) plus a required artifact `source`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersionPublishingProfile {
#[serde(flatten)]
pub gallery_artifact_publishing_profile_base: GalleryArtifactPublishingProfileBase,
pub source: UserArtifactSource,
#[serde(rename = "manageActions", default, skip_serializing_if = "Option::is_none")]
pub manage_actions: Option<UserArtifactManage>,
#[serde(rename = "enableHealthCheck", default, skip_serializing_if = "Option::is_none")]
pub enable_health_check: Option<bool>,
}
/// PATCH body for a gallery application version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryApplicationVersionUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryApplicationVersionProperties>,
}
/// Common publishing settings shared by gallery image and application
/// version publishing profiles: target regions, replica count, publish and
/// end-of-life dates, and storage account type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryArtifactPublishingProfileBase {
#[serde(rename = "targetRegions", default, skip_serializing_if = "Vec::is_empty")]
pub target_regions: Vec<TargetRegion>,
#[serde(rename = "replicaCount", default, skip_serializing_if = "Option::is_none")]
pub replica_count: Option<i32>,
#[serde(rename = "excludeFromLatest", default, skip_serializing_if = "Option::is_none")]
pub exclude_from_latest: Option<bool>,
#[serde(rename = "publishedDate", default, skip_serializing_if = "Option::is_none")]
pub published_date: Option<String>,
#[serde(rename = "endOfLifeDate", default, skip_serializing_if = "Option::is_none")]
pub end_of_life_date: Option<String>,
#[serde(rename = "storageAccountType", default, skip_serializing_if = "Option::is_none")]
pub storage_account_type: Option<gallery_artifact_publishing_profile_base::StorageAccountType>,
}
/// Enum types scoped to [`GalleryArtifactPublishingProfileBase`].
pub mod gallery_artifact_publishing_profile_base {
use super::*;
/// Storage account type used for replicas; renames map to underscore wire
/// values (e.g. `Standard_LRS`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum StorageAccountType {
#[serde(rename = "Standard_LRS")]
StandardLrs,
#[serde(rename = "Standard_ZRS")]
StandardZrs,
#[serde(rename = "Premium_LRS")]
PremiumLrs,
}
}
/// Source of a gallery artifact: a required managed image reference.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryArtifactSource {
#[serde(rename = "managedImage")]
pub managed_image: ManagedArtifact,
}
/// Source of a gallery artifact version, identified by resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryArtifactVersionSource {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
}
/// A data disk image in a gallery image version; `lun` (required) is the
/// logical unit number distinguishing data disks within the VM.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryDataDiskImage {
#[serde(flatten)]
pub gallery_disk_image: GalleryDiskImage,
pub lun: i32,
}
/// Common disk-image settings: size in GB, host caching mode, and source.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryDiskImage {
#[serde(rename = "sizeInGB", default, skip_serializing_if = "Option::is_none")]
pub size_in_gb: Option<i32>,
#[serde(rename = "hostCaching", default, skip_serializing_if = "Option::is_none")]
pub host_caching: Option<gallery_disk_image::HostCaching>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub source: Option<GalleryArtifactVersionSource>,
}
/// Enum types scoped to [`GalleryDiskImage`].
pub mod gallery_disk_image {
use super::*;
/// Host caching mode of the disk.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum HostCaching {
None,
ReadOnly,
ReadWrite,
}
}
/// Identifier of a gallery (its unique name).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryIdentifier {
#[serde(rename = "uniqueName", default, skip_serializing_if = "Option::is_none")]
pub unique_name: Option<String>,
}
/// A gallery image definition (a container for image versions).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImage {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryImageProperties>,
}
/// Marketplace-style identifier of a gallery image; all three parts are
/// required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageIdentifier {
pub publisher: String,
pub offer: String,
pub sku: String,
}
/// Paged list of gallery image definitions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageList {
pub value: Vec<GalleryImage>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Properties of a gallery image definition. Required on the wire:
/// `osType`, `osState`, and `identifier`; everything else is optional
/// metadata, recommendations, or service-populated state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageProperties {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub eula: Option<String>,
#[serde(rename = "privacyStatementUri", default, skip_serializing_if = "Option::is_none")]
pub privacy_statement_uri: Option<String>,
#[serde(rename = "releaseNoteUri", default, skip_serializing_if = "Option::is_none")]
pub release_note_uri: Option<String>,
#[serde(rename = "osType")]
pub os_type: gallery_image_properties::OsType,
#[serde(rename = "osState")]
pub os_state: gallery_image_properties::OsState,
#[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
pub hyper_v_generation: Option<gallery_image_properties::HyperVGeneration>,
#[serde(rename = "endOfLifeDate", default, skip_serializing_if = "Option::is_none")]
pub end_of_life_date: Option<String>,
pub identifier: GalleryImageIdentifier,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub recommended: Option<RecommendedMachineConfiguration>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub disallowed: Option<Disallowed>,
#[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
pub purchase_plan: Option<ImagePurchasePlan>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<gallery_image_properties::ProvisioningState>,
}
/// Enum types scoped to [`GalleryImageProperties`].
pub mod gallery_image_properties {
use super::*;
/// Operating system type of the image.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OsType {
Windows,
Linux,
}
/// Whether the image is generalized (sysprepped/deprovisioned) or
/// specialized.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OsState {
Generalized,
Specialized,
}
/// Hypervisor generation the image targets.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum HyperVGeneration {
V1,
V2,
}
/// Lifecycle state of the image definition.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Updating,
Failed,
Succeeded,
Deleting,
Migrating,
}
}
/// PATCH body for a gallery image definition.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryImageProperties>,
}
/// A version of a gallery image.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersion {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryImageVersionProperties>,
}
/// Paged list of gallery image versions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionList {
pub value: Vec<GalleryImageVersion>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Properties of an image version. Unlike application versions, the
/// publishing profile here is optional but `storageProfile` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionProperties {
#[serde(rename = "publishingProfile", default, skip_serializing_if = "Option::is_none")]
pub publishing_profile: Option<GalleryImageVersionPublishingProfile>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<gallery_image_version_properties::ProvisioningState>,
#[serde(rename = "storageProfile")]
pub storage_profile: GalleryImageVersionStorageProfile,
#[serde(rename = "replicationStatus", default, skip_serializing_if = "Option::is_none")]
pub replication_status: Option<ReplicationStatus>,
}
/// Enum types scoped to [`GalleryImageVersionProperties`].
pub mod gallery_image_version_properties {
use super::*;
/// Lifecycle state of the image version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Updating,
Failed,
Succeeded,
Deleting,
Migrating,
}
}
/// Publishing profile of an image version; adds nothing beyond the shared
/// publishing base, which is flattened onto this type on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionPublishingProfile {
#[serde(flatten)]
pub gallery_artifact_publishing_profile_base: GalleryArtifactPublishingProfileBase,
}
/// Storage profile of an image version: artifact source, OS disk image,
/// and data disk images (empty list omitted from the payload).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionStorageProfile {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub source: Option<GalleryArtifactVersionSource>,
#[serde(rename = "osDiskImage", default, skip_serializing_if = "Option::is_none")]
pub os_disk_image: Option<GalleryOsDiskImage>,
#[serde(rename = "dataDiskImages", default, skip_serializing_if = "Vec::is_empty")]
pub data_disk_images: Vec<GalleryDataDiskImage>,
}
/// PATCH body for a gallery image version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryImageVersionUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryImageVersionProperties>,
}
/// Paged list of galleries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryList {
pub value: Vec<Gallery>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// The OS disk image of a gallery image version; identical on the wire to
/// the shared disk-image shape (no `lun`, unlike data disk images).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryOsDiskImage {
#[serde(flatten)]
pub gallery_disk_image: GalleryDiskImage,
}
/// Properties of a gallery: description, identifier, and provisioning state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryProperties {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub identifier: Option<GalleryIdentifier>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<gallery_properties::ProvisioningState>,
}
/// Enum types scoped to [`GalleryProperties`].
pub mod gallery_properties {
use super::*;
/// Lifecycle state of the gallery.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
Creating,
Updating,
Failed,
Succeeded,
Deleting,
Migrating,
}
}
/// PATCH body for a gallery.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GalleryUpdate {
#[serde(flatten)]
pub update_resource_definition: UpdateResourceDefinition,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<GalleryProperties>,
}
/// Request body for granting temporary access to a disk or snapshot; both
/// the access level and the duration are required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GrantAccessData {
pub access: grant_access_data::Access,
#[serde(rename = "durationInSeconds")]
pub duration_in_seconds: i32,
}
/// Enum types scoped to [`GrantAccessData`].
pub mod grant_access_data {
use super::*;
/// Access level being requested.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Access {
None,
Read,
Write,
}
}
/// Hardware profile of a virtual machine: its (optional) VM size.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HardwareProfile {
#[serde(rename = "vmSize", default, skip_serializing_if = "Option::is_none")]
pub vm_size: Option<hardware_profile::VmSize>,
}
/// Enum types scoped to [`HardwareProfile`].
pub mod hardware_profile {
use super::*;
/// Known VM size names. Serde renames map each Rust variant to the exact
/// Azure wire value (e.g. `Standard_DS13-4_v2`). NOTE(review): this list
/// is a fixed snapshot from the API version it was generated against;
/// newer sizes returned by the service will fail to deserialize.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum VmSize {
#[serde(rename = "Basic_A0")]
BasicA0,
#[serde(rename = "Basic_A1")]
BasicA1,
#[serde(rename = "Basic_A2")]
BasicA2,
#[serde(rename = "Basic_A3")]
BasicA3,
#[serde(rename = "Basic_A4")]
BasicA4,
#[serde(rename = "Standard_A0")]
StandardA0,
#[serde(rename = "Standard_A1")]
StandardA1,
#[serde(rename = "Standard_A2")]
StandardA2,
#[serde(rename = "Standard_A3")]
StandardA3,
#[serde(rename = "Standard_A4")]
StandardA4,
#[serde(rename = "Standard_A5")]
StandardA5,
#[serde(rename = "Standard_A6")]
StandardA6,
#[serde(rename = "Standard_A7")]
StandardA7,
#[serde(rename = "Standard_A8")]
StandardA8,
#[serde(rename = "Standard_A9")]
StandardA9,
#[serde(rename = "Standard_A10")]
StandardA10,
#[serde(rename = "Standard_A11")]
StandardA11,
#[serde(rename = "Standard_A1_v2")]
StandardA1V2,
#[serde(rename = "Standard_A2_v2")]
StandardA2V2,
#[serde(rename = "Standard_A4_v2")]
StandardA4V2,
#[serde(rename = "Standard_A8_v2")]
StandardA8V2,
#[serde(rename = "Standard_A2m_v2")]
StandardA2mV2,
#[serde(rename = "Standard_A4m_v2")]
StandardA4mV2,
#[serde(rename = "Standard_A8m_v2")]
StandardA8mV2,
#[serde(rename = "Standard_B1s")]
StandardB1s,
#[serde(rename = "Standard_B1ms")]
StandardB1ms,
#[serde(rename = "Standard_B2s")]
StandardB2s,
#[serde(rename = "Standard_B2ms")]
StandardB2ms,
#[serde(rename = "Standard_B4ms")]
StandardB4ms,
#[serde(rename = "Standard_B8ms")]
StandardB8ms,
#[serde(rename = "Standard_D1")]
StandardD1,
#[serde(rename = "Standard_D2")]
StandardD2,
#[serde(rename = "Standard_D3")]
StandardD3,
#[serde(rename = "Standard_D4")]
StandardD4,
#[serde(rename = "Standard_D11")]
StandardD11,
#[serde(rename = "Standard_D12")]
StandardD12,
#[serde(rename = "Standard_D13")]
StandardD13,
#[serde(rename = "Standard_D14")]
StandardD14,
#[serde(rename = "Standard_D1_v2")]
StandardD1V2,
#[serde(rename = "Standard_D2_v2")]
StandardD2V2,
#[serde(rename = "Standard_D3_v2")]
StandardD3V2,
#[serde(rename = "Standard_D4_v2")]
StandardD4V2,
#[serde(rename = "Standard_D5_v2")]
StandardD5V2,
#[serde(rename = "Standard_D2_v3")]
StandardD2V3,
#[serde(rename = "Standard_D4_v3")]
StandardD4V3,
#[serde(rename = "Standard_D8_v3")]
StandardD8V3,
#[serde(rename = "Standard_D16_v3")]
StandardD16V3,
#[serde(rename = "Standard_D32_v3")]
StandardD32V3,
#[serde(rename = "Standard_D64_v3")]
StandardD64V3,
#[serde(rename = "Standard_D2s_v3")]
StandardD2sV3,
#[serde(rename = "Standard_D4s_v3")]
StandardD4sV3,
#[serde(rename = "Standard_D8s_v3")]
StandardD8sV3,
#[serde(rename = "Standard_D16s_v3")]
StandardD16sV3,
#[serde(rename = "Standard_D32s_v3")]
StandardD32sV3,
#[serde(rename = "Standard_D64s_v3")]
StandardD64sV3,
#[serde(rename = "Standard_D11_v2")]
StandardD11V2,
#[serde(rename = "Standard_D12_v2")]
StandardD12V2,
#[serde(rename = "Standard_D13_v2")]
StandardD13V2,
#[serde(rename = "Standard_D14_v2")]
StandardD14V2,
#[serde(rename = "Standard_D15_v2")]
StandardD15V2,
#[serde(rename = "Standard_DS1")]
StandardDs1,
#[serde(rename = "Standard_DS2")]
StandardDs2,
#[serde(rename = "Standard_DS3")]
StandardDs3,
#[serde(rename = "Standard_DS4")]
StandardDs4,
#[serde(rename = "Standard_DS11")]
StandardDs11,
#[serde(rename = "Standard_DS12")]
StandardDs12,
#[serde(rename = "Standard_DS13")]
StandardDs13,
#[serde(rename = "Standard_DS14")]
StandardDs14,
#[serde(rename = "Standard_DS1_v2")]
StandardDs1V2,
#[serde(rename = "Standard_DS2_v2")]
StandardDs2V2,
#[serde(rename = "Standard_DS3_v2")]
StandardDs3V2,
#[serde(rename = "Standard_DS4_v2")]
StandardDs4V2,
#[serde(rename = "Standard_DS5_v2")]
StandardDs5V2,
#[serde(rename = "Standard_DS11_v2")]
StandardDs11V2,
#[serde(rename = "Standard_DS12_v2")]
StandardDs12V2,
#[serde(rename = "Standard_DS13_v2")]
StandardDs13V2,
#[serde(rename = "Standard_DS14_v2")]
StandardDs14V2,
#[serde(rename = "Standard_DS15_v2")]
StandardDs15V2,
#[serde(rename = "Standard_DS13-4_v2")]
StandardDs134V2,
#[serde(rename = "Standard_DS13-2_v2")]
StandardDs132V2,
#[serde(rename = "Standard_DS14-8_v2")]
StandardDs148V2,
#[serde(rename = "Standard_DS14-4_v2")]
StandardDs144V2,
#[serde(rename = "Standard_E2_v3")]
StandardE2V3,
#[serde(rename = "Standard_E4_v3")]
StandardE4V3,
#[serde(rename = "Standard_E8_v3")]
StandardE8V3,
#[serde(rename = "Standard_E16_v3")]
StandardE16V3,
#[serde(rename = "Standard_E32_v3")]
StandardE32V3,
#[serde(rename = "Standard_E64_v3")]
StandardE64V3,
#[serde(rename = "Standard_E2s_v3")]
StandardE2sV3,
#[serde(rename = "Standard_E4s_v3")]
StandardE4sV3,
#[serde(rename = "Standard_E8s_v3")]
StandardE8sV3,
#[serde(rename = "Standard_E16s_v3")]
StandardE16sV3,
#[serde(rename = "Standard_E32s_v3")]
StandardE32sV3,
#[serde(rename = "Standard_E64s_v3")]
StandardE64sV3,
#[serde(rename = "Standard_E32-16_v3")]
StandardE3216V3,
#[serde(rename = "Standard_E32-8s_v3")]
StandardE328sV3,
#[serde(rename = "Standard_E64-32s_v3")]
StandardE6432sV3,
#[serde(rename = "Standard_E64-16s_v3")]
StandardE6416sV3,
#[serde(rename = "Standard_F1")]
StandardF1,
#[serde(rename = "Standard_F2")]
StandardF2,
#[serde(rename = "Standard_F4")]
StandardF4,
#[serde(rename = "Standard_F8")]
StandardF8,
#[serde(rename = "Standard_F16")]
StandardF16,
#[serde(rename = "Standard_F1s")]
StandardF1s,
#[serde(rename = "Standard_F2s")]
StandardF2s,
#[serde(rename = "Standard_F4s")]
StandardF4s,
#[serde(rename = "Standard_F8s")]
StandardF8s,
#[serde(rename = "Standard_F16s")]
StandardF16s,
#[serde(rename = "Standard_F2s_v2")]
StandardF2sV2,
#[serde(rename = "Standard_F4s_v2")]
StandardF4sV2,
#[serde(rename = "Standard_F8s_v2")]
StandardF8sV2,
#[serde(rename = "Standard_F16s_v2")]
StandardF16sV2,
#[serde(rename = "Standard_F32s_v2")]
StandardF32sV2,
#[serde(rename = "Standard_F64s_v2")]
StandardF64sV2,
#[serde(rename = "Standard_F72s_v2")]
StandardF72sV2,
#[serde(rename = "Standard_G1")]
StandardG1,
#[serde(rename = "Standard_G2")]
StandardG2,
#[serde(rename = "Standard_G3")]
StandardG3,
#[serde(rename = "Standard_G4")]
StandardG4,
#[serde(rename = "Standard_G5")]
StandardG5,
#[serde(rename = "Standard_GS1")]
StandardGs1,
#[serde(rename = "Standard_GS2")]
StandardGs2,
#[serde(rename = "Standard_GS3")]
StandardGs3,
#[serde(rename = "Standard_GS4")]
StandardGs4,
#[serde(rename = "Standard_GS5")]
StandardGs5,
#[serde(rename = "Standard_GS4-8")]
StandardGs48,
#[serde(rename = "Standard_GS4-4")]
StandardGs44,
#[serde(rename = "Standard_GS5-16")]
StandardGs516,
#[serde(rename = "Standard_GS5-8")]
StandardGs58,
#[serde(rename = "Standard_H8")]
StandardH8,
#[serde(rename = "Standard_H16")]
StandardH16,
#[serde(rename = "Standard_H8m")]
StandardH8m,
#[serde(rename = "Standard_H16m")]
StandardH16m,
#[serde(rename = "Standard_H16r")]
StandardH16r,
#[serde(rename = "Standard_H16mr")]
StandardH16mr,
#[serde(rename = "Standard_L4s")]
StandardL4s,
#[serde(rename = "Standard_L8s")]
StandardL8s,
#[serde(rename = "Standard_L16s")]
StandardL16s,
#[serde(rename = "Standard_L32s")]
StandardL32s,
#[serde(rename = "Standard_M64s")]
StandardM64s,
#[serde(rename = "Standard_M64ms")]
StandardM64ms,
#[serde(rename = "Standard_M128s")]
StandardM128s,
#[serde(rename = "Standard_M128ms")]
StandardM128ms,
#[serde(rename = "Standard_M64-32ms")]
StandardM6432ms,
#[serde(rename = "Standard_M64-16ms")]
StandardM6416ms,
#[serde(rename = "Standard_M128-64ms")]
StandardM12864ms,
#[serde(rename = "Standard_M128-32ms")]
StandardM12832ms,
#[serde(rename = "Standard_NC6")]
StandardNc6,
#[serde(rename = "Standard_NC12")]
StandardNc12,
#[serde(rename = "Standard_NC24")]
StandardNc24,
#[serde(rename = "Standard_NC24r")]
StandardNc24r,
#[serde(rename = "Standard_NC6s_v2")]
StandardNc6sV2,
#[serde(rename = "Standard_NC12s_v2")]
StandardNc12sV2,
#[serde(rename = "Standard_NC24s_v2")]
StandardNc24sV2,
#[serde(rename = "Standard_NC24rs_v2")]
StandardNc24rsV2,
#[serde(rename = "Standard_NC6s_v3")]
StandardNc6sV3,
#[serde(rename = "Standard_NC12s_v3")]
StandardNc12sV3,
#[serde(rename = "Standard_NC24s_v3")]
StandardNc24sV3,
#[serde(rename = "Standard_NC24rs_v3")]
StandardNc24rsV3,
#[serde(rename = "Standard_ND6s")]
StandardNd6s,
#[serde(rename = "Standard_ND12s")]
StandardNd12s,
#[serde(rename = "Standard_ND24s")]
StandardNd24s,
#[serde(rename = "Standard_ND24rs")]
StandardNd24rs,
#[serde(rename = "Standard_NV6")]
StandardNv6,
#[serde(rename = "Standard_NV12")]
StandardNv12,
#[serde(rename = "Standard_NV24")]
StandardNv24,
}
}
/// Hyper-V generation of a virtual machine image: `V1` or `V2`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum HyperVGenerationType {
    V1,
    V2,
}
/// A virtual machine image resource. Flattens the common ARM `Resource`
/// envelope (id/name/type/location/tags) alongside optional image properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Image {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ImageProperties>,
}
/// A data disk inside an image: the shared `ImageDisk` fields plus a
/// required logical unit number.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageDataDisk {
    #[serde(flatten)]
    pub image_disk: ImageDisk,
    // Required on the wire (no skip/default), unlike the flattened fields.
    pub lun: i32,
}
/// Disk settings shared by image OS and data disks. The disk source may be
/// given as a snapshot, a managed disk, or a blob URI — all optional here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageDisk {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub snapshot: Option<SubResource>,
    #[serde(rename = "managedDisk", default, skip_serializing_if = "Option::is_none")]
    pub managed_disk: Option<SubResource>,
    #[serde(rename = "blobUri", default, skip_serializing_if = "Option::is_none")]
    pub blob_uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub caching: Option<image_disk::Caching>,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "storageAccountType", default, skip_serializing_if = "Option::is_none")]
    pub storage_account_type: Option<StorageAccountType>,
    #[serde(rename = "diskEncryptionSet", default, skip_serializing_if = "Option::is_none")]
    pub disk_encryption_set: Option<DiskEncryptionSetParameters>,
}
/// Enum types scoped to [`ImageDisk`].
pub mod image_disk {
    use super::*;
    /// Host caching mode for the disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Caching {
        None,
        ReadOnly,
        ReadWrite,
    }
}
/// A reference to an image disk by resource id, optionally narrowed to a
/// specific LUN.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageDiskReference {
    pub id: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub lun: Option<i32>,
}
/// One page of an image list; `next_link` is present when more pages follow.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageListResult {
    pub value: Vec<Image>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// The OS disk of an image: the shared `ImageDisk` fields plus required
/// OS type and state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageOsDisk {
    #[serde(flatten)]
    pub image_disk: ImageDisk,
    #[serde(rename = "osType")]
    pub os_type: image_os_disk::OsType,
    #[serde(rename = "osState")]
    pub os_state: image_os_disk::OsState,
}
/// Enum types scoped to [`ImageOsDisk`].
pub mod image_os_disk {
    use super::*;
    /// Operating system family on the disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
    /// Whether the OS on the disk is generalized or specialized.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsState {
        Generalized,
        Specialized,
    }
}
/// Properties of an [`Image`]: source VM, storage profile, provisioning
/// state, and Hyper-V generation — all optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageProperties {
    #[serde(rename = "sourceVirtualMachine", default, skip_serializing_if = "Option::is_none")]
    pub source_virtual_machine: Option<SubResource>,
    #[serde(rename = "storageProfile", default, skip_serializing_if = "Option::is_none")]
    pub storage_profile: Option<ImageStorageProfile>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
    pub hyper_v_generation: Option<HyperVGenerationType>,
}
/// Marketplace purchase-plan identifiers (name/publisher/product), all
/// optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImagePurchasePlan {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub product: Option<String>,
}
/// A reference to a platform image by publisher/offer/sku/version, flattened
/// over `SubResource` so it can alternatively carry a resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageReference {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub offer: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    #[serde(rename = "exactVersion", default, skip_serializing_if = "Option::is_none")]
    pub exact_version: Option<String>,
}
/// The storage layout of an image: one optional OS disk plus any number of
/// data disks.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageStorageProfile {
    #[serde(rename = "osDisk", default, skip_serializing_if = "Option::is_none")]
    pub os_disk: Option<ImageOsDisk>,
    #[serde(rename = "dataDisks", default, skip_serializing_if = "Vec::is_empty")]
    pub data_disks: Vec<ImageDataDisk>,
    #[serde(rename = "zoneResilient", default, skip_serializing_if = "Option::is_none")]
    pub zone_resilient: Option<bool>,
}
/// PATCH body for an image: the tags-only `UpdateResource` envelope plus
/// optional new properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImageUpdate {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ImageProperties>,
}
/// Inner error details. Field names (`exceptiontype`, `errordetail`) mirror
/// the wire format exactly — no serde rename is applied.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InnerError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub exceptiontype: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub errordetail: Option<String>,
}
/// SKU name/tier pair reported on an instance.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InstanceSku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
}
/// A single instance-view status entry: code, severity level, display text,
/// message, and timestamp — all optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InstanceViewStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub level: Option<instance_view_status::Level>,
    #[serde(rename = "displayStatus", default, skip_serializing_if = "Option::is_none")]
    pub display_status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub time: Option<String>,
}
/// Enum types scoped to [`InstanceViewStatus`].
pub mod instance_view_status {
    use super::*;
    /// Severity of a status entry.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Level {
        Info,
        Warning,
        Error,
    }
}
/// Aggregated counts of status codes across instances.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InstanceViewStatusesSummary {
    #[serde(rename = "statusesSummary", default, skip_serializing_if = "Vec::is_empty")]
    pub statuses_summary: Vec<StatusCodeCount>,
}
/// A Key Vault key used for a disk encryption set; the key URL is required,
/// the source vault is optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyForDiskEncryptionSet {
    #[serde(rename = "sourceVault", default, skip_serializing_if = "Option::is_none")]
    pub source_vault: Option<SourceVault>,
    #[serde(rename = "keyUrl")]
    pub key_url: String,
}
/// A Key Vault key reference where both the vault (`SourceVault`) and the
/// key URL are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultAndKeyReference {
    #[serde(rename = "sourceVault")]
    pub source_vault: SourceVault,
    #[serde(rename = "keyUrl")]
    pub key_url: String,
}
/// A Key Vault secret reference where both the vault (`SourceVault`) and the
/// secret URL are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultAndSecretReference {
    #[serde(rename = "sourceVault")]
    pub source_vault: SourceVault,
    #[serde(rename = "secretUrl")]
    pub secret_url: String,
}
/// A Key Vault key reference whose vault is identified by a plain
/// `SubResource` (contrast with [`KeyVaultAndKeyReference`]).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultKeyReference {
    #[serde(rename = "keyUrl")]
    pub key_url: String,
    #[serde(rename = "sourceVault")]
    pub source_vault: SubResource,
}
/// A Key Vault secret reference whose vault is identified by a plain
/// `SubResource` (contrast with [`KeyVaultAndSecretReference`]).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultSecretReference {
    #[serde(rename = "secretUrl")]
    pub secret_url: String,
    #[serde(rename = "sourceVault")]
    pub source_vault: SubResource,
}
/// Summary of the most recent patch installation run: outcome status, reboot
/// status, per-category patch counts, timing, initiator, and any error.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LastPatchInstallationSummary {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<last_patch_installation_summary::Status>,
    #[serde(rename = "installationActivityId", default, skip_serializing_if = "Option::is_none")]
    pub installation_activity_id: Option<String>,
    #[serde(rename = "maintenanceWindowExceeded", default, skip_serializing_if = "Option::is_none")]
    pub maintenance_window_exceeded: Option<bool>,
    #[serde(rename = "rebootStatus", default, skip_serializing_if = "Option::is_none")]
    pub reboot_status: Option<last_patch_installation_summary::RebootStatus>,
    #[serde(rename = "notSelectedPatchCount", default, skip_serializing_if = "Option::is_none")]
    pub not_selected_patch_count: Option<i32>,
    #[serde(rename = "excludedPatchCount", default, skip_serializing_if = "Option::is_none")]
    pub excluded_patch_count: Option<i32>,
    #[serde(rename = "pendingPatchCount", default, skip_serializing_if = "Option::is_none")]
    pub pending_patch_count: Option<i32>,
    #[serde(rename = "installedPatchCount", default, skip_serializing_if = "Option::is_none")]
    pub installed_patch_count: Option<i32>,
    #[serde(rename = "failedPatchCount", default, skip_serializing_if = "Option::is_none")]
    pub failed_patch_count: Option<i32>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "lastModifiedTime", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_time: Option<String>,
    #[serde(rename = "startedBy", default, skip_serializing_if = "Option::is_none")]
    pub started_by: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ApiError>,
}
/// Enum types scoped to [`LastPatchInstallationSummary`].
pub mod last_patch_installation_summary {
    use super::*;
    /// Overall outcome of the patch run.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        InProgress,
        Failed,
        Succeeded,
        CompletedWithWarnings,
    }
    /// Reboot state resulting from the patch run.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum RebootStatus {
        NotNeeded,
        Required,
        Started,
        Failed,
        Completed,
    }
}
/// Linux OS settings: password-auth toggle, SSH configuration, and the VM
/// agent provisioning flag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinuxConfiguration {
    #[serde(rename = "disablePasswordAuthentication", default, skip_serializing_if = "Option::is_none")]
    pub disable_password_authentication: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ssh: Option<SshConfiguration>,
    #[serde(rename = "provisionVMAgent", default, skip_serializing_if = "Option::is_none")]
    pub provision_vm_agent: Option<bool>,
}
/// One page of usage records; `next_link` is present when more pages follow.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ListUsagesResult {
    pub value: Vec<Usage>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A named load-balancer configuration with optional properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LoadBalancerConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<LoadBalancerConfigurationProperties>,
}
/// Properties of a [`LoadBalancerConfiguration`]: its frontend IP
/// configurations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LoadBalancerConfigurationProperties {
    #[serde(rename = "frontendIPConfigurations", default, skip_serializing_if = "Vec::is_empty")]
    pub frontend_ip_configurations: Vec<LoadBalancerFrontendIpConfiguration>,
}
/// A named frontend IP configuration with optional properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LoadBalancerFrontendIpConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<LoadBalancerFrontendIpConfigurationProperties>,
}
/// Frontend IP settings: a public IP resource, a subnet resource, or a
/// literal private IP address — all optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LoadBalancerFrontendIpConfigurationProperties {
    #[serde(rename = "publicIPAddress", default, skip_serializing_if = "Option::is_none")]
    pub public_ip_address: Option<SubResource>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub subnet: Option<SubResource>,
    #[serde(rename = "privateIPAddress", default, skip_serializing_if = "Option::is_none")]
    pub private_ip_address: Option<String>,
}
/// Common input for log-analytics requests: required output blob SAS URI and
/// time window, plus optional group-by toggles.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogAnalyticsInputBase {
    #[serde(rename = "blobContainerSasUri")]
    pub blob_container_sas_uri: String,
    #[serde(rename = "fromTime")]
    pub from_time: String,
    #[serde(rename = "toTime")]
    pub to_time: String,
    #[serde(rename = "groupByThrottlePolicy", default, skip_serializing_if = "Option::is_none")]
    pub group_by_throttle_policy: Option<bool>,
    #[serde(rename = "groupByOperationName", default, skip_serializing_if = "Option::is_none")]
    pub group_by_operation_name: Option<bool>,
    #[serde(rename = "groupByResourceName", default, skip_serializing_if = "Option::is_none")]
    pub group_by_resource_name: Option<bool>,
    #[serde(rename = "groupByClientApplicationId", default, skip_serializing_if = "Option::is_none")]
    pub group_by_client_application_id: Option<bool>,
    #[serde(rename = "groupByUserAgent", default, skip_serializing_if = "Option::is_none")]
    pub group_by_user_agent: Option<bool>,
}
/// Result wrapper for a log-analytics operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogAnalyticsOperationResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<LogAnalyticsOutput>,
}
/// Output of a log-analytics operation (an opaque string).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogAnalyticsOutput {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output: Option<String>,
}
/// Maintenance/redeploy status for an instance: whether customer-initiated
/// maintenance is allowed, the pre-maintenance and maintenance windows, and
/// the last operation's result code and message.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MaintenanceRedeployStatus {
    #[serde(rename = "isCustomerInitiatedMaintenanceAllowed", default, skip_serializing_if = "Option::is_none")]
    pub is_customer_initiated_maintenance_allowed: Option<bool>,
    #[serde(rename = "preMaintenanceWindowStartTime", default, skip_serializing_if = "Option::is_none")]
    pub pre_maintenance_window_start_time: Option<String>,
    #[serde(rename = "preMaintenanceWindowEndTime", default, skip_serializing_if = "Option::is_none")]
    pub pre_maintenance_window_end_time: Option<String>,
    #[serde(rename = "maintenanceWindowStartTime", default, skip_serializing_if = "Option::is_none")]
    pub maintenance_window_start_time: Option<String>,
    #[serde(rename = "maintenanceWindowEndTime", default, skip_serializing_if = "Option::is_none")]
    pub maintenance_window_end_time: Option<String>,
    #[serde(rename = "lastOperationResultCode", default, skip_serializing_if = "Option::is_none")]
    pub last_operation_result_code: Option<maintenance_redeploy_status::LastOperationResultCode>,
    #[serde(rename = "lastOperationMessage", default, skip_serializing_if = "Option::is_none")]
    pub last_operation_message: Option<String>,
}
/// Enum types scoped to [`MaintenanceRedeployStatus`].
pub mod maintenance_redeploy_status {
    use super::*;
    /// Result of the last maintenance operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LastOperationResultCode {
        None,
        RetryLater,
        MaintenanceAborted,
        MaintenanceCompleted,
    }
}
/// A managed artifact identified solely by a required resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagedArtifact {
    pub id: String,
}
/// Managed-disk parameters: a `SubResource` id plus optional storage account
/// type and disk encryption set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagedDiskParameters {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(rename = "storageAccountType", default, skip_serializing_if = "Option::is_none")]
    pub storage_account_type: Option<StorageAccountType>,
    #[serde(rename = "diskEncryptionSet", default, skip_serializing_if = "Option::is_none")]
    pub disk_encryption_set: Option<DiskEncryptionSetParameters>,
}
/// Network access policy: allow all, allow private, or deny all.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum NetworkAccessPolicy {
    AllowAll,
    AllowPrivate,
    DenyAll,
}
/// A reference to a network interface (`SubResource` id) with an optional
/// primary flag in its properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkInterfaceReference {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<NetworkInterfaceReferenceProperties>,
}
/// Properties of a [`NetworkInterfaceReference`]: whether it is the primary
/// interface.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkInterfaceReferenceProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub primary: Option<bool>,
}
/// Network profile: the set of network interface references.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkProfile {
    #[serde(rename = "networkInterfaces", default, skip_serializing_if = "Vec::is_empty")]
    pub network_interfaces: Vec<NetworkInterfaceReference>,
}
/// The OS disk of a virtual machine. Only `create_option` is required; the
/// disk source may be a VHD, an image, or a managed disk.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OsDisk {
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<os_disk::OsType>,
    #[serde(rename = "encryptionSettings", default, skip_serializing_if = "Option::is_none")]
    pub encryption_settings: Option<DiskEncryptionSettings>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub vhd: Option<VirtualHardDisk>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub image: Option<VirtualHardDisk>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub caching: Option<Caching>,
    #[serde(rename = "writeAcceleratorEnabled", default, skip_serializing_if = "Option::is_none")]
    pub write_accelerator_enabled: Option<bool>,
    #[serde(rename = "diffDiskSettings", default, skip_serializing_if = "Option::is_none")]
    pub diff_disk_settings: Option<DiffDiskSettings>,
    // The only required field: how the disk is created/attached.
    #[serde(rename = "createOption")]
    pub create_option: CreateOption,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "managedDisk", default, skip_serializing_if = "Option::is_none")]
    pub managed_disk: Option<ManagedDiskParameters>,
}
/// Enum types scoped to [`OsDisk`].
pub mod os_disk {
    use super::*;
    /// Operating system family on the disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
}
/// OS disk image descriptor: the operating system family is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OsDiskImage {
    #[serde(rename = "operatingSystem")]
    pub operating_system: os_disk_image::OperatingSystem,
}
/// Enum types scoped to [`OsDiskImage`].
pub mod os_disk_image {
    use super::*;
    /// Operating system family of the image.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OperatingSystem {
        Windows,
        Linux,
    }
}
/// Encryption settings for an OS disk image; currently just the shared
/// `DiskImageEncryption` fields, flattened.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OsDiskImageEncryption {
    #[serde(flatten)]
    pub disk_image_encryption: DiskImageEncryption,
}
/// OS profile for a virtual machine: computer name, admin credentials,
/// custom data, OS-specific configuration (Windows or Linux), vault secrets,
/// and extension/provisioning flags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OsProfile {
    #[serde(rename = "computerName", default, skip_serializing_if = "Option::is_none")]
    pub computer_name: Option<String>,
    #[serde(rename = "adminUsername", default, skip_serializing_if = "Option::is_none")]
    pub admin_username: Option<String>,
    #[serde(rename = "adminPassword", default, skip_serializing_if = "Option::is_none")]
    pub admin_password: Option<String>,
    #[serde(rename = "customData", default, skip_serializing_if = "Option::is_none")]
    pub custom_data: Option<String>,
    #[serde(rename = "windowsConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub windows_configuration: Option<WindowsConfiguration>,
    #[serde(rename = "linuxConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub linux_configuration: Option<LinuxConfiguration>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub secrets: Vec<VaultSecretGroup>,
    #[serde(rename = "allowExtensionOperations", default, skip_serializing_if = "Option::is_none")]
    pub allow_extension_operations: Option<bool>,
    #[serde(rename = "requireGuestProvisionSignal", default, skip_serializing_if = "Option::is_none")]
    pub require_guest_provision_signal: Option<bool>,
}
/// Request body to change an orchestration service's state: both the service
/// name and the action are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OrchestrationServiceStateInput {
    #[serde(rename = "serviceName")]
    pub service_name: orchestration_service_state_input::ServiceName,
    pub action: orchestration_service_state_input::Action,
}
/// Enum types scoped to [`OrchestrationServiceStateInput`].
pub mod orchestration_service_state_input {
    use super::*;
    /// The orchestration service being targeted (only `AutomaticRepairs`).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ServiceName {
        AutomaticRepairs,
    }
    /// The state-change action to perform.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Action {
        Resume,
        Suspend,
    }
}
/// Reported state of an orchestration service (name and current state).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OrchestrationServiceSummary {
    #[serde(rename = "serviceName", default, skip_serializing_if = "Option::is_none")]
    pub service_name: Option<orchestration_service_summary::ServiceName>,
    #[serde(rename = "serviceState", default, skip_serializing_if = "Option::is_none")]
    pub service_state: Option<orchestration_service_summary::ServiceState>,
}
/// Enum types scoped to [`OrchestrationServiceSummary`].
pub mod orchestration_service_summary {
    use super::*;
    /// The orchestration service being reported (only `AutomaticRepairs`).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ServiceName {
        AutomaticRepairs,
    }
    /// Current running state of the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ServiceState {
        NotRunning,
        Running,
        Suspended,
    }
}
/// Guest patching settings: the patch mode, if specified.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PatchSettings {
    #[serde(rename = "patchMode", default, skip_serializing_if = "Option::is_none")]
    pub patch_mode: Option<patch_settings::PatchMode>,
}
/// Enum types scoped to [`PatchSettings`].
pub mod patch_settings {
    use super::*;
    /// How patches are applied; `AutomaticByOs` serializes as
    /// `"AutomaticByOS"`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PatchMode {
        Manual,
        #[serde(rename = "AutomaticByOS")]
        AutomaticByOs,
        AutomaticByPlatform,
    }
}
/// Marketplace plan identifiers: name, publisher, product, and an optional
/// promotion code.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Plan {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub product: Option<String>,
    #[serde(rename = "promotionCode", default, skip_serializing_if = "Option::is_none")]
    pub promotion_code: Option<String>,
}
/// A private endpoint, identified by an optional resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnection {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateEndpointConnectionProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the `type_` field with a serde rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// One page of private endpoint connections; `next_link` is present when
/// more pages follow.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateEndpointConnection>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Properties of a private endpoint connection; the connection state is
/// required, the endpoint and provisioning state are optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionProperties {
    #[serde(rename = "privateEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub private_endpoint: Option<PrivateEndpoint>,
    #[serde(rename = "privateLinkServiceConnectionState")]
    pub private_link_service_connection_state: PrivateLinkServiceConnectionState,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<PrivateEndpointConnectionProvisioningState>,
}
/// Provisioning lifecycle state of a private endpoint connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateEndpointConnectionProvisioningState {
    Succeeded,
    Creating,
    Deleting,
    Failed,
}
/// Approval status of a private endpoint service connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateEndpointServiceConnectionStatus {
    Pending,
    Approved,
    Rejected,
}
/// A private-link resource: optional properties plus id/name/type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateLinkResourceProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// Unpaged list of private-link resources (no `nextLink`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateLinkResource>,
}
/// Properties of a private-link resource: group id plus required members and
/// DNS zone names.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceProperties {
    #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
    pub group_id: Option<String>,
    #[serde(rename = "requiredMembers", default, skip_serializing_if = "Vec::is_empty")]
    pub required_members: Vec<String>,
    #[serde(rename = "requiredZoneNames", default, skip_serializing_if = "Vec::is_empty")]
    pub required_zone_names: Vec<String>,
}
/// Connection state of a private-link service connection: approval status,
/// description, and any actions required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkServiceConnectionState {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<PrivateEndpointServiceConnectionStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")]
    pub actions_required: Option<String>,
}
/// A proximity placement group resource; flattens the common ARM `Resource`
/// envelope alongside optional properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProximityPlacementGroup {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ProximityPlacementGroupProperties>,
}
/// One page of proximity placement groups; `next_link` is present when more
/// pages follow.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProximityPlacementGroupListResult {
    pub value: Vec<ProximityPlacementGroup>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Properties of a [`ProximityPlacementGroup`]: its type, the VMs, scale
/// sets, and availability sets placed in it, and an overall colocation
/// status.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProximityPlacementGroupProperties {
    #[serde(rename = "proximityPlacementGroupType", default, skip_serializing_if = "Option::is_none")]
    pub proximity_placement_group_type: Option<proximity_placement_group_properties::ProximityPlacementGroupType>,
    #[serde(rename = "virtualMachines", default, skip_serializing_if = "Vec::is_empty")]
    pub virtual_machines: Vec<SubResourceWithColocationStatus>,
    #[serde(rename = "virtualMachineScaleSets", default, skip_serializing_if = "Vec::is_empty")]
    pub virtual_machine_scale_sets: Vec<SubResourceWithColocationStatus>,
    #[serde(rename = "availabilitySets", default, skip_serializing_if = "Vec::is_empty")]
    pub availability_sets: Vec<SubResourceWithColocationStatus>,
    #[serde(rename = "colocationStatus", default, skip_serializing_if = "Option::is_none")]
    pub colocation_status: Option<InstanceViewStatus>,
}
/// Enum types scoped to [`ProximityPlacementGroupProperties`].
pub mod proximity_placement_group_properties {
    use super::*;
    /// Type of the proximity placement group.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProximityPlacementGroupType {
        Standard,
        Ultra,
    }
}
/// PATCH body for a proximity placement group (tags only, via
/// `UpdateResource`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProximityPlacementGroupUpdate {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
}
/// A resource envelope without location/tags: just id, name, and type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyOnlyResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// Marketplace purchase plan where publisher, name, and product are all
/// required (contrast with [`ImagePurchasePlan`], where they are optional).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PurchasePlan {
    pub publisher: String,
    pub name: String,
    pub product: String,
}
/// Recommended machine configuration: vCPU and memory ranges. The `vCPUs`
/// wire name maps to the generated `v_cp_us` field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecommendedMachineConfiguration {
    #[serde(rename = "vCPUs", default, skip_serializing_if = "Option::is_none")]
    pub v_cp_us: Option<ResourceRange>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub memory: Option<ResourceRange>,
}
/// Response to a recovery walk: whether the walk was performed and the next
/// platform update domain.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecoveryWalkResponse {
    #[serde(rename = "walkPerformed", default, skip_serializing_if = "Option::is_none")]
    pub walk_performed: Option<bool>,
    #[serde(rename = "nextPlatformUpdateDomain", default, skip_serializing_if = "Option::is_none")]
    pub next_platform_update_domain: Option<i64>,
}
/// Replication status for a single region: state, detail text, and a
/// progress percentage.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RegionalReplicationStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub region: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<regional_replication_status::State>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub progress: Option<i32>,
}
/// Enum types scoped to [`RegionalReplicationStatus`].
pub mod regional_replication_status {
    use super::*;
    /// Per-region replication state.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Unknown,
        Replicating,
        Completed,
        Failed,
    }
}
/// Overall replication status: an aggregated state plus per-region entries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReplicationStatus {
    #[serde(rename = "aggregatedState", default, skip_serializing_if = "Option::is_none")]
    pub aggregated_state: Option<replication_status::AggregatedState>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub summary: Vec<RegionalReplicationStatus>,
}
/// Enum types scoped to [`ReplicationStatus`].
pub mod replication_status {
    use super::*;
    /// Aggregated replication state across all regions.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AggregatedState {
        Unknown,
        InProgress,
        Completed,
        Failed,
    }
}
/// Input for a request-rate-by-interval query: the shared log-analytics
/// fields plus a required interval length.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RequestRateByIntervalInput {
    #[serde(flatten)]
    pub log_analytics_input_base: LogAnalyticsInputBase,
    #[serde(rename = "intervalLength")]
    pub interval_length: request_rate_by_interval_input::IntervalLength,
}
/// Enum types scoped to [`RequestRateByIntervalInput`].
pub mod request_rate_by_interval_input {
    use super::*;
    /// Supported aggregation interval lengths.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum IntervalLength {
        ThreeMins,
        FiveMins,
        ThirtyMins,
        SixtyMins,
    }
}
/// The common ARM resource envelope: id, name, type, location, and tags —
/// all optional here. Flattened into most top-level resource models.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    // Arbitrary key/value tag map, kept as raw JSON.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Instance-view status variant used for sub-resources; same fields as
/// [`InstanceViewStatus`] but declared in a different order.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceInstanceViewStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(rename = "displayStatus", default, skip_serializing_if = "Option::is_none")]
    pub display_status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub level: Option<resource_instance_view_status::Level>,
}
/// Enum types scoped to [`ResourceInstanceViewStatus`].
pub mod resource_instance_view_status {
    use super::*;
    /// Severity of a status entry.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Level {
        Info,
        Warning,
        Error,
    }
}
/// An inclusive min/max integer range.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceRange {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub min: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max: Option<i32>,
}
/// A resource SKU: identity (name/tier/size/family/kind), capacity,
/// availability (locations and zone info), API versions, costs,
/// capabilities, and any restrictions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSku {
    #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub size: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub family: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub capacity: Option<ResourceSkuCapacity>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub locations: Vec<String>,
    #[serde(rename = "locationInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub location_info: Vec<ResourceSkuLocationInfo>,
    #[serde(rename = "apiVersions", default, skip_serializing_if = "Vec::is_empty")]
    pub api_versions: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub costs: Vec<ResourceSkuCosts>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub capabilities: Vec<ResourceSkuCapabilities>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub restrictions: Vec<ResourceSkuRestrictions>,
}
/// A name/value capability pair for a SKU.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSkuCapabilities {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
/// Scaling capacity of a SKU: minimum, maximum, default, and scale type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSkuCapacity {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub minimum: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub maximum: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub default: Option<i64>,
    #[serde(rename = "scaleType", default, skip_serializing_if = "Option::is_none")]
    pub scale_type: Option<resource_sku_capacity::ScaleType>,
}
/// Enum types scoped to [`ResourceSkuCapacity`].
pub mod resource_sku_capacity {
    use super::*;
    /// Scale type supported by the SKU.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ScaleType {
        Automatic,
        Manual,
        None,
    }
}
/// Billing metadata for a SKU; note the `meterID` wire name (capital `ID`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSkuCosts {
    #[serde(rename = "meterID", default, skip_serializing_if = "Option::is_none")]
    pub meter_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub quantity: Option<i64>,
    #[serde(rename = "extendedUnit", default, skip_serializing_if = "Option::is_none")]
    pub extended_unit: Option<String>,
}
/// Availability of a SKU in one location: zone names plus per-zone details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSkuLocationInfo {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
    #[serde(rename = "zoneDetails", default, skip_serializing_if = "Vec::is_empty")]
    pub zone_details: Vec<ResourceSkuZoneDetails>,
}
/// Locations and zones a SKU restriction applies to.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSkuRestrictionInfo {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub locations: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
}
/// A restriction on a SKU: its scope (location or zone), the restricted
/// values, detail info, and the reason code.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSkuRestrictions {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<resource_sku_restrictions::Type>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub values: Vec<String>,
    #[serde(rename = "restrictionInfo", default, skip_serializing_if = "Option::is_none")]
    pub restriction_info: Option<ResourceSkuRestrictionInfo>,
    #[serde(rename = "reasonCode", default, skip_serializing_if = "Option::is_none")]
    pub reason_code: Option<resource_sku_restrictions::ReasonCode>,
}
/// Enum types scoped to [`ResourceSkuRestrictions`].
pub mod resource_sku_restrictions {
    use super::*;
    /// Scope of the restriction.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        Location,
        Zone,
    }
    /// Why the restriction applies.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ReasonCode {
        QuotaId,
        NotAvailableForSubscription,
    }
}
/// Zone-level details for a SKU. `name` is a list of zone names as defined
/// by the wire schema (a plural field despite the singular name).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSkuZoneDetails {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub name: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub capabilities: Vec<ResourceSkuCapabilities>,
}
/// One page of resource SKUs; `next_link` is present when more pages follow.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSkusResult {
    pub value: Vec<ResourceSku>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// One page of resource URIs; `next_link` is present when more pages follow.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceUriList {
    pub value: Vec<String>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// SAS URIs returned when retrieving boot diagnostics data: the console
/// screenshot blob and the serial console log blob.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RetrieveBootDiagnosticsDataResult {
    #[serde(rename = "consoleScreenshotBlobUri", default, skip_serializing_if = "Option::is_none")]
    pub console_screenshot_blob_uri: Option<String>,
    #[serde(rename = "serialConsoleLogBlobUri", default, skip_serializing_if = "Option::is_none")]
    pub serial_console_log_blob_uri: Option<String>,
}
/// A role instance resource. Unlike types that `#[serde(flatten)]` a shared
/// `Resource`, this one spells out id/name/type/location/tags inline.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RoleInstance {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    // Arbitrary JSON object of tag key/value pairs.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<InstanceSku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<RoleInstanceProperties>,
}
/// Runtime view of a role instance: update/fault domains, a private id,
/// and its current statuses.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RoleInstanceInstanceView {
    #[serde(rename = "platformUpdateDomain", default, skip_serializing_if = "Option::is_none")]
    pub platform_update_domain: Option<i32>,
    #[serde(rename = "platformFaultDomain", default, skip_serializing_if = "Option::is_none")]
    pub platform_fault_domain: Option<i32>,
    #[serde(rename = "privateId", default, skip_serializing_if = "Option::is_none")]
    pub private_id: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<ResourceInstanceViewStatus>,
}
/// Paged list of `RoleInstance` values; `nextLink` points to the next page if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RoleInstanceListResult {
    pub value: Vec<RoleInstance>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Network profile of a role instance: references to its network interfaces.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RoleInstanceNetworkProfile {
    #[serde(rename = "networkInterfaces", default, skip_serializing_if = "Vec::is_empty")]
    pub network_interfaces: Vec<SubResource>,
}
/// Properties bag for `RoleInstance`: network profile and instance view.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RoleInstanceProperties {
    #[serde(rename = "networkProfile", default, skip_serializing_if = "Option::is_none")]
    pub network_profile: Option<RoleInstanceNetworkProfile>,
    #[serde(rename = "instanceView", default, skip_serializing_if = "Option::is_none")]
    pub instance_view: Option<RoleInstanceInstanceView>,
}
/// Request body naming a set of role instances. The field is required:
/// no `default`/`skip_serializing_if`, so it is always (de)serialized.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RoleInstances {
    #[serde(rename = "roleInstances")]
    pub role_instances: Vec<String>,
}
/// Outcome of a rollback: per-instance success/failure counts and the error, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RollbackStatusInfo {
    #[serde(rename = "successfullyRolledbackInstanceCount", default, skip_serializing_if = "Option::is_none")]
    pub successfully_rolledback_instance_count: Option<i32>,
    #[serde(rename = "failedRolledbackInstanceCount", default, skip_serializing_if = "Option::is_none")]
    pub failed_rolledback_instance_count: Option<i32>,
    #[serde(rename = "rollbackError", default, skip_serializing_if = "Option::is_none")]
    pub rollback_error: Option<ApiError>,
}
/// Configuration knobs for a rolling upgrade: batch-size and unhealthy-instance
/// percentage limits, plus the pause between batches.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RollingUpgradePolicy {
    #[serde(rename = "maxBatchInstancePercent", default, skip_serializing_if = "Option::is_none")]
    pub max_batch_instance_percent: Option<i32>,
    #[serde(rename = "maxUnhealthyInstancePercent", default, skip_serializing_if = "Option::is_none")]
    pub max_unhealthy_instance_percent: Option<i32>,
    #[serde(rename = "maxUnhealthyUpgradedInstancePercent", default, skip_serializing_if = "Option::is_none")]
    pub max_unhealthy_upgraded_instance_percent: Option<i32>,
    // Presumably an ISO-8601 duration string — TODO confirm against the service spec.
    #[serde(rename = "pauseTimeBetweenBatches", default, skip_serializing_if = "Option::is_none")]
    pub pause_time_between_batches: Option<String>,
}
/// Instance counts describing how far a rolling upgrade has progressed.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RollingUpgradeProgressInfo {
    #[serde(rename = "successfulInstanceCount", default, skip_serializing_if = "Option::is_none")]
    pub successful_instance_count: Option<i32>,
    #[serde(rename = "failedInstanceCount", default, skip_serializing_if = "Option::is_none")]
    pub failed_instance_count: Option<i32>,
    #[serde(rename = "inProgressInstanceCount", default, skip_serializing_if = "Option::is_none")]
    pub in_progress_instance_count: Option<i32>,
    #[serde(rename = "pendingInstanceCount", default, skip_serializing_if = "Option::is_none")]
    pub pending_instance_count: Option<i32>,
}
/// Current state of a rolling upgrade: status code, start time, and last action.
/// Timestamps are kept as raw strings rather than parsed date types.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RollingUpgradeRunningStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<rolling_upgrade_running_status::Code>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "lastAction", default, skip_serializing_if = "Option::is_none")]
    pub last_action: Option<rolling_upgrade_running_status::LastAction>,
    #[serde(rename = "lastActionTime", default, skip_serializing_if = "Option::is_none")]
    pub last_action_time: Option<String>,
}
/// Enum types for the `code` and `lastAction` fields of `RollingUpgradeRunningStatus`.
pub mod rolling_upgrade_running_status {
    use super::*;
    /// Overall upgrade state.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Code {
        RollingForward,
        Cancelled,
        Completed,
        Faulted,
    }
    /// Most recent action performed on the upgrade.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LastAction {
        Start,
        Cancel,
    }
}
/// Rolling-upgrade status as a tracked resource: common `Resource` fields are
/// flattened in, with the status payload under `properties`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RollingUpgradeStatusInfo {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<RollingUpgradeStatusInfoProperties>,
}
/// Properties of `RollingUpgradeStatusInfo`: the policy in effect, running
/// status, progress counts, and terminal error (if the upgrade faulted).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RollingUpgradeStatusInfoProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub policy: Option<RollingUpgradePolicy>,
    #[serde(rename = "runningStatus", default, skip_serializing_if = "Option::is_none")]
    pub running_status: Option<RollingUpgradeRunningStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub progress: Option<RollingUpgradeProgressInfo>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ApiError>,
}
/// Full run-command document: the base metadata (flattened) plus the required
/// script lines and optional parameter definitions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunCommandDocument {
    #[serde(flatten)]
    pub run_command_document_base: RunCommandDocumentBase,
    pub script: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<RunCommandParameterDefinition>,
}
/// Metadata shared by run-command documents. All fields here are required
/// (no `default`/`skip_serializing_if` attributes).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunCommandDocumentBase {
    // Wire key is literally `$schema`.
    #[serde(rename = "$schema")]
    pub schema: String,
    pub id: String,
    #[serde(rename = "osType")]
    pub os_type: run_command_document_base::OsType,
    pub label: String,
    pub description: String,
}
/// Enum type for the `osType` field of `RunCommandDocumentBase`.
pub mod run_command_document_base {
    use super::*;
    /// Target operating system of the run-command document.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
}
/// Request body for executing a run command: a required `commandId` plus
/// optional inline script and parameter values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunCommandInput {
    #[serde(rename = "commandId")]
    pub command_id: String,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub script: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<RunCommandInputParameter>,
}
/// A single name/value parameter supplied with `RunCommandInput`. Both fields required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunCommandInputParameter {
    pub name: String,
    pub value: String,
}
/// Paged list of available run-command documents (base metadata only).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunCommandListResult {
    pub value: Vec<RunCommandDocumentBase>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Definition of a run-command parameter: name and type are required;
/// default value and required-ness are optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunCommandParameterDefinition {
    pub name: String,
    // The parameter's type is a free-form string on the wire, not an enum.
    #[serde(rename = "type")]
    pub type_: String,
    #[serde(rename = "defaultValue", default, skip_serializing_if = "Option::is_none")]
    pub default_value: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub required: Option<bool>,
}
/// Result of a run-command execution, expressed as instance-view statuses.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RunCommandResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<InstanceViewStatus>,
}
/// Scale-in policy: the ordered list of rule names applied when scaling in.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScaleInPolicy {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub rules: Vec<String>,
}
/// Scheduled-events configuration; currently only the terminate-notification profile.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScheduledEventsProfile {
    #[serde(rename = "terminateNotificationProfile", default, skip_serializing_if = "Option::is_none")]
    pub terminate_notification_profile: Option<TerminateNotificationProfile>,
}
/// Security settings; currently only the encryption-at-host flag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SecurityProfile {
    #[serde(rename = "encryptionAtHost", default, skip_serializing_if = "Option::is_none")]
    pub encryption_at_host: Option<bool>,
}
/// One entry in a disk's share info: the URI of a VM using the disk.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ShareInfoElement {
    #[serde(rename = "vmUri", default, skip_serializing_if = "Option::is_none")]
    pub vm_uri: Option<String>,
}
/// Generic SKU descriptor: name, tier, and capacity — all optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub capacity: Option<i64>,
}
/// A snapshot resource: common `Resource` fields flattened in, plus SKU,
/// extended location, owning entity, and the snapshot-specific properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Snapshot {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(rename = "managedBy", default, skip_serializing_if = "Option::is_none")]
    pub managed_by: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<SnapshotSku>,
    #[serde(rename = "extendedLocation", default, skip_serializing_if = "Option::is_none")]
    pub extended_location: Option<ExtendedLocation>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<SnapshotProperties>,
}
/// Paged list of snapshots; `nextLink` points to the next page if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotList {
    pub value: Vec<Snapshot>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Properties of a snapshot. Only `creationData` is required; everything else
/// is optional and omitted when `None`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotProperties {
    #[serde(rename = "timeCreated", default, skip_serializing_if = "Option::is_none")]
    pub time_created: Option<String>,
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<snapshot_properties::OsType>,
    #[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
    pub hyper_v_generation: Option<snapshot_properties::HyperVGeneration>,
    #[serde(rename = "purchasePlan", default, skip_serializing_if = "Option::is_none")]
    pub purchase_plan: Option<PurchasePlan>,
    // Required: how the snapshot's source data was created.
    #[serde(rename = "creationData")]
    pub creation_data: CreationData,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "diskSizeBytes", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_bytes: Option<i64>,
    #[serde(rename = "diskState", default, skip_serializing_if = "Option::is_none")]
    pub disk_state: Option<DiskState>,
    #[serde(rename = "uniqueId", default, skip_serializing_if = "Option::is_none")]
    pub unique_id: Option<String>,
    #[serde(rename = "encryptionSettingsCollection", default, skip_serializing_if = "Option::is_none")]
    pub encryption_settings_collection: Option<EncryptionSettingsCollection>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub incremental: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<Encryption>,
    #[serde(rename = "networkAccessPolicy", default, skip_serializing_if = "Option::is_none")]
    pub network_access_policy: Option<NetworkAccessPolicy>,
    #[serde(rename = "diskAccessId", default, skip_serializing_if = "Option::is_none")]
    pub disk_access_id: Option<String>,
}
/// Enum types for the `osType` and `hyperVGeneration` fields of `SnapshotProperties`.
pub mod snapshot_properties {
    use super::*;
    /// Operating system captured in the snapshot.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
    /// Hyper-V generation of the source VM.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HyperVGeneration {
        V1,
        V2,
    }
}
/// SKU of a snapshot: constrained name plus a free-form tier string.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotSku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<snapshot_sku::Name>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
}
/// Enum type for the `name` field of `SnapshotSku`. Wire values use the
/// underscore form (e.g. `Standard_LRS`) via serde renames.
pub mod snapshot_sku {
    use super::*;
    /// Allowed snapshot SKU names.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "Standard_LRS")]
        StandardLrs,
        #[serde(rename = "Premium_LRS")]
        PremiumLrs,
        #[serde(rename = "Standard_ZRS")]
        StandardZrs,
    }
}
/// PATCH body for updating a snapshot: updatable properties, tags, and SKU.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<SnapshotUpdateProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<SnapshotSku>,
}
/// Subset of snapshot properties that can be changed after creation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SnapshotUpdateProperties {
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<snapshot_update_properties::OsType>,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "encryptionSettingsCollection", default, skip_serializing_if = "Option::is_none")]
    pub encryption_settings_collection: Option<EncryptionSettingsCollection>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<Encryption>,
    #[serde(rename = "networkAccessPolicy", default, skip_serializing_if = "Option::is_none")]
    pub network_access_policy: Option<NetworkAccessPolicy>,
    #[serde(rename = "diskAccessId", default, skip_serializing_if = "Option::is_none")]
    pub disk_access_id: Option<String>,
}
/// Enum type for the `osType` field of `SnapshotUpdateProperties`.
pub mod snapshot_update_properties {
    use super::*;
    /// Operating system of the snapshot.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
}
/// Reference to a key vault, by resource id only.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SourceVault {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// SSH configuration: the set of public keys to install.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SshConfiguration {
    #[serde(rename = "publicKeys", default, skip_serializing_if = "Vec::is_empty")]
    pub public_keys: Vec<SshPublicKey>,
}
/// A single SSH public key: destination path on the VM and the key material.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SshPublicKey {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub path: Option<String>,
    #[serde(rename = "keyData", default, skip_serializing_if = "Option::is_none")]
    pub key_data: Option<String>,
}
/// Result of generating an SSH key pair. All three fields are required —
/// note this payload carries the private key in plain text.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SshPublicKeyGenerateKeyPairResult {
    #[serde(rename = "privateKey")]
    pub private_key: String,
    #[serde(rename = "publicKey")]
    pub public_key: String,
    pub id: String,
}
/// An SSH public key as a tracked resource (common `Resource` fields flattened in).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SshPublicKeyResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<SshPublicKeyResourceProperties>,
}
/// Properties of an SSH public key resource: the key material itself.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SshPublicKeyResourceProperties {
    #[serde(rename = "publicKey", default, skip_serializing_if = "Option::is_none")]
    pub public_key: Option<String>,
}
/// PATCH body for an SSH public key resource (tags via `UpdateResource`, flattened).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SshPublicKeyUpdateResource {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<SshPublicKeyResourceProperties>,
}
/// Paged list of SSH public key resources; `nextLink` points to the next page if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SshPublicKeysGroupListResult {
    pub value: Vec<SshPublicKeyResource>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A status code paired with the number of occurrences.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StatusCodeCount {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub count: Option<i32>,
}
/// Storage account types; wire values use the underscore form (e.g.
/// `Standard_LRS`) via serde renames. Top-level (shared) variant of the
/// narrower per-type enums defined in companion modules elsewhere in this file.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum StorageAccountType {
    #[serde(rename = "Standard_LRS")]
    StandardLrs,
    #[serde(rename = "Premium_LRS")]
    PremiumLrs,
    #[serde(rename = "StandardSSD_LRS")]
    StandardSsdLrs,
    #[serde(rename = "UltraSSD_LRS")]
    UltraSsdLrs,
}
/// VM storage profile: image reference, OS disk, and data disks.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageProfile {
    #[serde(rename = "imageReference", default, skip_serializing_if = "Option::is_none")]
    pub image_reference: Option<ImageReference>,
    #[serde(rename = "osDisk", default, skip_serializing_if = "Option::is_none")]
    pub os_disk: Option<OsDisk>,
    #[serde(rename = "dataDisks", default, skip_serializing_if = "Vec::is_empty")]
    pub data_disks: Vec<DataDisk>,
}
/// Reference to another resource by id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Read-only reference to another resource by id. Structurally identical to
/// `SubResource`; kept as a distinct type to mirror the API schema.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubResourceReadOnly {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// A sub-resource reference carrying an additional colocation status.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubResourceWithColocationStatus {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(rename = "colocationStatus", default, skip_serializing_if = "Option::is_none")]
    pub colocation_status: Option<InstanceViewStatus>,
}
/// A replication target region for a gallery image. Only `name` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TargetRegion {
    pub name: String,
    #[serde(rename = "regionalReplicaCount", default, skip_serializing_if = "Option::is_none")]
    pub regional_replica_count: Option<i32>,
    #[serde(rename = "storageAccountType", default, skip_serializing_if = "Option::is_none")]
    pub storage_account_type: Option<target_region::StorageAccountType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<EncryptionImages>,
}
/// Enum type for the `storageAccountType` field of `TargetRegion`.
/// Narrower than the top-level `StorageAccountType` (no SSD variants).
pub mod target_region {
    use super::*;
    /// Storage account types allowed for a target region.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum StorageAccountType {
        #[serde(rename = "Standard_LRS")]
        StandardLrs,
        #[serde(rename = "Standard_ZRS")]
        StandardZrs,
        #[serde(rename = "Premium_LRS")]
        PremiumLrs,
    }
}
/// Terminate-notification settings: enable flag and the not-before timeout
/// (kept as a raw string on the wire).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TerminateNotificationProfile {
    #[serde(rename = "notBeforeTimeout", default, skip_serializing_if = "Option::is_none")]
    pub not_before_timeout: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enable: Option<bool>,
}
/// Log-analytics request for throttled requests; adds nothing beyond the
/// flattened `LogAnalyticsInputBase`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ThrottledRequestsInput {
    #[serde(flatten)]
    pub log_analytics_input_base: LogAnalyticsInputBase,
}
/// An update domain, identified by id and name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateDomain {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// Paged list of update domains; `nextLink` points to the next page if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateDomainListResult {
    pub value: Vec<UpdateDomain>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Base type for PATCH bodies: only tags are updatable here. Flattened into
/// the various `*Update` types in this file.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Update-resource shape that also echoes id/name/type alongside tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateResourceDefinition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Historical record of one OS-upgrade operation, with its resource type and location.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpgradeOperationHistoricalStatusInfo {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<UpgradeOperationHistoricalStatusInfoProperties>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
}
/// Details of one historical upgrade: status, progress, error, initiator,
/// target image, and rollback information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpgradeOperationHistoricalStatusInfoProperties {
    #[serde(rename = "runningStatus", default, skip_serializing_if = "Option::is_none")]
    pub running_status: Option<UpgradeOperationHistoryStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub progress: Option<RollingUpgradeProgressInfo>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ApiError>,
    #[serde(rename = "startedBy", default, skip_serializing_if = "Option::is_none")]
    pub started_by: Option<upgrade_operation_historical_status_info_properties::StartedBy>,
    #[serde(rename = "targetImageReference", default, skip_serializing_if = "Option::is_none")]
    pub target_image_reference: Option<ImageReference>,
    #[serde(rename = "rollbackInfo", default, skip_serializing_if = "Option::is_none")]
    pub rollback_info: Option<RollbackStatusInfo>,
}
/// Enum type for the `startedBy` field of
/// `UpgradeOperationHistoricalStatusInfoProperties`.
pub mod upgrade_operation_historical_status_info_properties {
    use super::*;
    /// Who initiated the upgrade.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum StartedBy {
        Unknown,
        User,
        Platform,
    }
}
/// Status of one upgrade operation: code plus start/end times (raw strings).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpgradeOperationHistoryStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<upgrade_operation_history_status::Code>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
}
/// Enum type for the `code` field of `UpgradeOperationHistoryStatus`;
/// mirrors `rolling_upgrade_running_status::Code`.
pub mod upgrade_operation_history_status {
    use super::*;
    /// Overall upgrade state.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Code {
        RollingForward,
        Cancelled,
        Completed,
        Faulted,
    }
}
/// Upgrade policy for a scale set: mode plus the rolling- and automatic-OS-
/// upgrade sub-policies.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpgradePolicy {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub mode: Option<upgrade_policy::Mode>,
    #[serde(rename = "rollingUpgradePolicy", default, skip_serializing_if = "Option::is_none")]
    pub rolling_upgrade_policy: Option<RollingUpgradePolicy>,
    #[serde(rename = "automaticOSUpgradePolicy", default, skip_serializing_if = "Option::is_none")]
    pub automatic_os_upgrade_policy: Option<AutomaticOsUpgradePolicy>,
}
/// Enum type for the `mode` field of `UpgradePolicy`.
pub mod upgrade_policy {
    use super::*;
    /// How upgrades are applied.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Mode {
        Automatic,
        Manual,
        Rolling,
    }
}
/// A usage/quota entry. All fields required. Note the asymmetric widths:
/// `currentValue` is `i32` while `limit` is `i64` (as declared by the schema).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Usage {
    pub unit: usage::Unit,
    #[serde(rename = "currentValue")]
    pub current_value: i32,
    pub limit: i64,
    pub name: UsageName,
}
/// Enum type for the `unit` field of `Usage`; `Count` is the only value.
pub mod usage {
    use super::*;
    /// Measurement unit of a usage entry.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Unit {
        Count,
    }
}
/// Name of a usage counter: machine value plus a localized display value.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageName {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
    #[serde(rename = "localizedValue", default, skip_serializing_if = "Option::is_none")]
    pub localized_value: Option<String>,
}
/// Install/remove/update commands for a gallery application artifact.
/// `install` and `remove` are required; `update` is optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UserArtifactManage {
    pub install: String,
    pub remove: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub update: Option<String>,
}
/// Source of a user artifact: required media link plus an optional default
/// configuration link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UserArtifactSource {
    #[serde(rename = "mediaLink")]
    pub media_link: String,
    #[serde(rename = "defaultConfigurationLink", default, skip_serializing_if = "Option::is_none")]
    pub default_configuration_link: Option<String>,
}
/// Input for converting a VM scale set to a single placement group.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VmScaleSetConvertToSinglePlacementGroupInput {
    #[serde(rename = "activePlacementGroupId", default, skip_serializing_if = "Option::is_none")]
    pub active_placement_group_id: Option<String>,
}
/// A certificate stored in a key vault: its URL and target certificate store.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VaultCertificate {
    #[serde(rename = "certificateUrl", default, skip_serializing_if = "Option::is_none")]
    pub certificate_url: Option<String>,
    #[serde(rename = "certificateStore", default, skip_serializing_if = "Option::is_none")]
    pub certificate_store: Option<String>,
}
/// A group of vault certificates together with the vault they come from.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VaultSecretGroup {
    #[serde(rename = "sourceVault", default, skip_serializing_if = "Option::is_none")]
    pub source_vault: Option<SubResource>,
    #[serde(rename = "vaultCertificates", default, skip_serializing_if = "Vec::is_empty")]
    pub vault_certificates: Vec<VaultCertificate>,
}
/// A virtual hard disk, referenced by URI.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualHardDisk {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub uri: Option<String>,
}
/// A virtual machine resource: common `Resource` fields flattened in, plus
/// plan, properties, child extension resources, identity, and zones.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachine {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub plan: Option<Plan>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineProperties>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub resources: Vec<VirtualMachineExtension>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<VirtualMachineIdentity>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
}
/// Instance view of the VM agent: version, extension handlers, and statuses.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineAgentInstanceView {
    #[serde(rename = "vmAgentVersion", default, skip_serializing_if = "Option::is_none")]
    pub vm_agent_version: Option<String>,
    #[serde(rename = "extensionHandlers", default, skip_serializing_if = "Vec::is_empty")]
    pub extension_handlers: Vec<VirtualMachineExtensionHandlerInstanceView>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<InstanceViewStatus>,
}
/// Result of a patch assessment: overall status, counts by patch category,
/// the assessed patches, and an error if one occurred.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineAssessPatchesResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<virtual_machine_assess_patches_result::Status>,
    #[serde(rename = "assessmentActivityId", default, skip_serializing_if = "Option::is_none")]
    pub assessment_activity_id: Option<String>,
    #[serde(rename = "rebootPending", default, skip_serializing_if = "Option::is_none")]
    pub reboot_pending: Option<bool>,
    #[serde(rename = "criticalAndSecurityPatchCount", default, skip_serializing_if = "Option::is_none")]
    pub critical_and_security_patch_count: Option<i32>,
    #[serde(rename = "otherPatchCount", default, skip_serializing_if = "Option::is_none")]
    pub other_patch_count: Option<i32>,
    #[serde(rename = "startDateTime", default, skip_serializing_if = "Option::is_none")]
    pub start_date_time: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub patches: Vec<VirtualMachineSoftwarePatchProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ApiError>,
}
/// Enum type for the `status` field of `VirtualMachineAssessPatchesResult`.
pub mod virtual_machine_assess_patches_result {
    use super::*;
    /// Terminal or in-flight state of a patch assessment.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        InProgress,
        Failed,
        Succeeded,
        CompletedWithWarnings,
    }
}
/// Parameters for capturing a VM image. All fields required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineCaptureParameters {
    #[serde(rename = "vhdPrefix")]
    pub vhd_prefix: String,
    #[serde(rename = "destinationContainerName")]
    pub destination_container_name: String,
    #[serde(rename = "overwriteVhds")]
    pub overwrite_vhds: bool,
}
/// Result of a VM capture: an ARM-template-shaped payload (`$schema`,
/// `contentVersion`, free-form `parameters`/`resources`) on top of `SubResource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineCaptureResult {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(rename = "$schema", default, skip_serializing_if = "Option::is_none")]
    pub schema: Option<String>,
    #[serde(rename = "contentVersion", default, skip_serializing_if = "Option::is_none")]
    pub content_version: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub parameters: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub resources: Vec<serde_json::Value>,
}
/// A VM extension resource (common `Resource` fields flattened in).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineExtension {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineExtensionProperties>,
}
/// Instance view of an extension handler: type, handler version, and status.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineExtensionHandlerInstanceView {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "typeHandlerVersion", default, skip_serializing_if = "Option::is_none")]
    pub type_handler_version: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<InstanceViewStatus>,
}
/// A VM extension image resource (common `Resource` fields flattened in).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineExtensionImage {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineExtensionImageProperties>,
}
/// Properties of an extension image. OS, role, and handler schema are
/// required; the two capability flags are optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineExtensionImageProperties {
    #[serde(rename = "operatingSystem")]
    pub operating_system: String,
    #[serde(rename = "computeRole")]
    pub compute_role: String,
    #[serde(rename = "handlerSchema")]
    pub handler_schema: String,
    #[serde(rename = "vmScaleSetEnabled", default, skip_serializing_if = "Option::is_none")]
    pub vm_scale_set_enabled: Option<bool>,
    #[serde(rename = "supportsMultipleExtensions", default, skip_serializing_if = "Option::is_none")]
    pub supports_multiple_extensions: Option<bool>,
}
/// Instance view of a VM extension: identity plus sub-statuses and statuses.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineExtensionInstanceView {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "typeHandlerVersion", default, skip_serializing_if = "Option::is_none")]
    pub type_handler_version: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub substatuses: Vec<InstanceViewStatus>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<InstanceViewStatus>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineExtensionProperties {
#[serde(rename = "forceUpdateTag", default, skip_serializing_if = "Option::is_none")]
pub force_update_tag: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub publisher: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(rename = "typeHandlerVersion", default, skip_serializing_if = "Option::is_none")]
pub type_handler_version: Option<String>,
#[serde(rename = "autoUpgradeMinorVersion", default, skip_serializing_if = "Option::is_none")]
pub auto_upgrade_minor_version: Option<bool>,
#[serde(rename = "enableAutomaticUpgrade", default, skip_serializing_if = "Option::is_none")]
pub enable_automatic_upgrade: Option<bool>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub settings: Option<serde_json::Value>,
#[serde(rename = "protectedSettings", default, skip_serializing_if = "Option::is_none")]
pub protected_settings: Option<serde_json::Value>,
#[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
pub provisioning_state: Option<String>,
#[serde(rename = "instanceView", default, skip_serializing_if = "Option::is_none")]
pub instance_view: Option<VirtualMachineExtensionInstanceView>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineExtensionUpdate {
#[serde(flatten)]
pub update_resource: UpdateResource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<VirtualMachineExtensionUpdateProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineExtensionUpdateProperties {
#[serde(rename = "forceUpdateTag", default, skip_serializing_if = "Option::is_none")]
pub force_update_tag: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub publisher: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(rename = "typeHandlerVersion", default, skip_serializing_if = "Option::is_none")]
pub type_handler_version: Option<String>,
#[serde(rename = "autoUpgradeMinorVersion", default, skip_serializing_if = "Option::is_none")]
pub auto_upgrade_minor_version: Option<bool>,
#[serde(rename = "enableAutomaticUpgrade", default, skip_serializing_if = "Option::is_none")]
pub enable_automatic_upgrade: Option<bool>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub settings: Option<serde_json::Value>,
#[serde(rename = "protectedSettings", default, skip_serializing_if = "Option::is_none")]
pub protected_settings: Option<serde_json::Value>,
}
/// Response body for listing the extensions installed on a virtual machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineExtensionsListResult {
    /// The returned extensions; omitted from JSON when empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<VirtualMachineExtension>,
}
/// Health status of a virtual machine, carried as a single instance-view status entry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineHealthStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<InstanceViewStatus>,
}
/// Managed-service identity attached to a virtual machine
/// (serialized as the resource's `identity` block).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineIdentity {
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    // Named `type_` because `type` is a Rust keyword; serialized as `type`.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<virtual_machine_identity::Type>,
    // Free-form JSON map; presumably keyed by user-assigned identity resource id — verify against API docs.
    #[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")]
    pub user_assigned_identities: Option<serde_json::Value>,
}
/// Companion enums for [`VirtualMachineIdentity`].
pub mod virtual_machine_identity {
    use super::*;
    /// Identity kind; the combined variant serializes as the literal
    /// string `"SystemAssigned, UserAssigned"`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        SystemAssigned,
        UserAssigned,
        #[serde(rename = "SystemAssigned, UserAssigned")]
        SystemAssignedUserAssigned,
        None,
    }
}
/// A virtual machine image: the base image-resource fields plus optional
/// image-specific properties (flattened into one JSON object).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineImage {
    // `flatten` merges the resource fields (id, name, location, tags) into this object's JSON.
    #[serde(flatten)]
    pub virtual_machine_image_resource: VirtualMachineImageResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineImageProperties>,
}
/// Detailed properties of a virtual machine image (plan, disks, OS upgrade
/// support, Hyper-V generation, disallowed configurations).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineImageProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub plan: Option<PurchasePlan>,
    #[serde(rename = "osDiskImage", default, skip_serializing_if = "Option::is_none")]
    pub os_disk_image: Option<OsDiskImage>,
    #[serde(rename = "dataDiskImages", default, skip_serializing_if = "Vec::is_empty")]
    pub data_disk_images: Vec<DataDiskImage>,
    #[serde(rename = "automaticOSUpgradeProperties", default, skip_serializing_if = "Option::is_none")]
    pub automatic_os_upgrade_properties: Option<AutomaticOsUpgradeProperties>,
    #[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
    pub hyper_v_generation: Option<HyperVGenerationType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub disallowed: Option<DisallowedConfiguration>,
}
/// Base fields shared by image resources. Unlike most models here,
/// `name` and `location` are required (no `skip_serializing_if`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineImageResource {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    pub name: String,
    pub location: String,
    /// Resource tags as a free-form JSON map.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Runtime (instance) view of a virtual machine: placement domains, guest OS
/// info, agent/extension state, disks, health, boot diagnostics and statuses.
/// All fields are optional/defaulted — the service populates what it knows.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineInstanceView {
    #[serde(rename = "platformUpdateDomain", default, skip_serializing_if = "Option::is_none")]
    pub platform_update_domain: Option<i32>,
    #[serde(rename = "platformFaultDomain", default, skip_serializing_if = "Option::is_none")]
    pub platform_fault_domain: Option<i32>,
    #[serde(rename = "computerName", default, skip_serializing_if = "Option::is_none")]
    pub computer_name: Option<String>,
    #[serde(rename = "osName", default, skip_serializing_if = "Option::is_none")]
    pub os_name: Option<String>,
    #[serde(rename = "osVersion", default, skip_serializing_if = "Option::is_none")]
    pub os_version: Option<String>,
    #[serde(rename = "hyperVGeneration", default, skip_serializing_if = "Option::is_none")]
    pub hyper_v_generation: Option<virtual_machine_instance_view::HyperVGeneration>,
    #[serde(rename = "rdpThumbPrint", default, skip_serializing_if = "Option::is_none")]
    pub rdp_thumb_print: Option<String>,
    #[serde(rename = "vmAgent", default, skip_serializing_if = "Option::is_none")]
    pub vm_agent: Option<VirtualMachineAgentInstanceView>,
    #[serde(rename = "maintenanceRedeployStatus", default, skip_serializing_if = "Option::is_none")]
    pub maintenance_redeploy_status: Option<MaintenanceRedeployStatus>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub disks: Vec<DiskInstanceView>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub extensions: Vec<VirtualMachineExtensionInstanceView>,
    #[serde(rename = "vmHealth", default, skip_serializing_if = "Option::is_none")]
    pub vm_health: Option<VirtualMachineHealthStatus>,
    #[serde(rename = "bootDiagnostics", default, skip_serializing_if = "Option::is_none")]
    pub boot_diagnostics: Option<BootDiagnosticsInstanceView>,
    #[serde(rename = "assignedHost", default, skip_serializing_if = "Option::is_none")]
    pub assigned_host: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<InstanceViewStatus>,
    #[serde(rename = "patchStatus", default, skip_serializing_if = "Option::is_none")]
    pub patch_status: Option<VirtualMachinePatchStatus>,
}
/// Companion enums for [`VirtualMachineInstanceView`].
pub mod virtual_machine_instance_view {
    use super::*;
    /// Hyper-V generation reported for the VM ("V1" or "V2").
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HyperVGeneration {
        V1,
        V2,
    }
}
/// Paged response for listing virtual machines; `next_link` holds the URL of
/// the next page when more results are available.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineListResult {
    pub value: Vec<VirtualMachine>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Patch status of a virtual machine: summaries of the latest patch
/// assessment and the latest patch installation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachinePatchStatus {
    #[serde(rename = "availablePatchSummary", default, skip_serializing_if = "Option::is_none")]
    pub available_patch_summary: Option<AvailablePatchSummary>,
    #[serde(rename = "lastPatchInstallationSummary", default, skip_serializing_if = "Option::is_none")]
    pub last_patch_installation_summary: Option<LastPatchInstallationSummary>,
}
/// The `properties` payload of a virtual machine resource: hardware, storage,
/// OS, network and security profiles, placement references, billing/eviction
/// settings, and read-back state (provisioning state, instance view, vmId).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineProperties {
    #[serde(rename = "hardwareProfile", default, skip_serializing_if = "Option::is_none")]
    pub hardware_profile: Option<HardwareProfile>,
    #[serde(rename = "storageProfile", default, skip_serializing_if = "Option::is_none")]
    pub storage_profile: Option<StorageProfile>,
    #[serde(rename = "additionalCapabilities", default, skip_serializing_if = "Option::is_none")]
    pub additional_capabilities: Option<AdditionalCapabilities>,
    #[serde(rename = "osProfile", default, skip_serializing_if = "Option::is_none")]
    pub os_profile: Option<OsProfile>,
    #[serde(rename = "networkProfile", default, skip_serializing_if = "Option::is_none")]
    pub network_profile: Option<NetworkProfile>,
    #[serde(rename = "securityProfile", default, skip_serializing_if = "Option::is_none")]
    pub security_profile: Option<SecurityProfile>,
    #[serde(rename = "diagnosticsProfile", default, skip_serializing_if = "Option::is_none")]
    pub diagnostics_profile: Option<DiagnosticsProfile>,
    // Placement references are plain sub-resource ids.
    #[serde(rename = "availabilitySet", default, skip_serializing_if = "Option::is_none")]
    pub availability_set: Option<SubResource>,
    #[serde(rename = "virtualMachineScaleSet", default, skip_serializing_if = "Option::is_none")]
    pub virtual_machine_scale_set: Option<SubResource>,
    #[serde(rename = "proximityPlacementGroup", default, skip_serializing_if = "Option::is_none")]
    pub proximity_placement_group: Option<SubResource>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub priority: Option<Priority>,
    #[serde(rename = "evictionPolicy", default, skip_serializing_if = "Option::is_none")]
    pub eviction_policy: Option<EvictionPolicy>,
    #[serde(rename = "billingProfile", default, skip_serializing_if = "Option::is_none")]
    pub billing_profile: Option<BillingProfile>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub host: Option<SubResource>,
    #[serde(rename = "hostGroup", default, skip_serializing_if = "Option::is_none")]
    pub host_group: Option<SubResource>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "instanceView", default, skip_serializing_if = "Option::is_none")]
    pub instance_view: Option<VirtualMachineInstanceView>,
    #[serde(rename = "licenseType", default, skip_serializing_if = "Option::is_none")]
    pub license_type: Option<String>,
    #[serde(rename = "vmId", default, skip_serializing_if = "Option::is_none")]
    pub vm_id: Option<String>,
    #[serde(rename = "extensionsTimeBudget", default, skip_serializing_if = "Option::is_none")]
    pub extensions_time_budget: Option<String>,
}
/// Parameters for a VM reimage request; `temp_disk` selects whether only the
/// temporary disk is reimaged.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineReimageParameters {
    #[serde(rename = "tempDisk", default, skip_serializing_if = "Option::is_none")]
    pub temp_disk: Option<bool>,
}
/// A run-command resource on a virtual machine: standard resource fields
/// (flattened) plus optional run-command properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineRunCommand {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineRunCommandProperties>,
}
/// Instance view of a run command: execution state/message, exit code,
/// captured output and error text, and start/end timestamps (as strings).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineRunCommandInstanceView {
    #[serde(rename = "executionState", default, skip_serializing_if = "Option::is_none")]
    pub execution_state: Option<virtual_machine_run_command_instance_view::ExecutionState>,
    #[serde(rename = "executionMessage", default, skip_serializing_if = "Option::is_none")]
    pub execution_message: Option<String>,
    #[serde(rename = "exitCode", default, skip_serializing_if = "Option::is_none")]
    pub exit_code: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<InstanceViewStatus>,
}
/// Companion enums for [`VirtualMachineRunCommandInstanceView`].
pub mod virtual_machine_run_command_instance_view {
    use super::*;
    /// Lifecycle state of a run-command execution.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ExecutionState {
        Unknown,
        Pending,
        Running,
        Failed,
        Succeeded,
        TimedOut,
        Canceled,
    }
}
/// Properties of a run command: the script source, its (optionally protected)
/// parameters, execution identity/timeout, output blob destinations, and
/// read-back state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineRunCommandProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub source: Option<VirtualMachineRunCommandScriptSource>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<RunCommandInputParameter>,
    // Parameters the service treats as secrets; same shape as `parameters`.
    #[serde(rename = "protectedParameters", default, skip_serializing_if = "Vec::is_empty")]
    pub protected_parameters: Vec<RunCommandInputParameter>,
    #[serde(rename = "asyncExecution", default, skip_serializing_if = "Option::is_none")]
    pub async_execution: Option<bool>,
    #[serde(rename = "runAsUser", default, skip_serializing_if = "Option::is_none")]
    pub run_as_user: Option<String>,
    #[serde(rename = "runAsPassword", default, skip_serializing_if = "Option::is_none")]
    pub run_as_password: Option<String>,
    #[serde(rename = "timeoutInSeconds", default, skip_serializing_if = "Option::is_none")]
    pub timeout_in_seconds: Option<i32>,
    #[serde(rename = "outputBlobUri", default, skip_serializing_if = "Option::is_none")]
    pub output_blob_uri: Option<String>,
    #[serde(rename = "errorBlobUri", default, skip_serializing_if = "Option::is_none")]
    pub error_blob_uri: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "instanceView", default, skip_serializing_if = "Option::is_none")]
    pub instance_view: Option<VirtualMachineRunCommandInstanceView>,
}
/// Script source for a run command: inline text, a script URI, or the id of a
/// predefined command. Presumably mutually exclusive — confirm against API docs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineRunCommandScriptSource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub script: Option<String>,
    #[serde(rename = "scriptUri", default, skip_serializing_if = "Option::is_none")]
    pub script_uri: Option<String>,
    #[serde(rename = "commandId", default, skip_serializing_if = "Option::is_none")]
    pub command_id: Option<String>,
}
/// PATCH body for updating a run command (update-resource fields flattened).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineRunCommandUpdate {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineRunCommandProperties>,
}
/// Paged response for listing run commands.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineRunCommandsListResult {
    pub value: Vec<VirtualMachineRunCommand>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A virtual machine scale set resource: base resource fields (flattened),
/// optional sku/plan/properties/identity, and availability zones.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSet {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub plan: Option<Plan>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<VirtualMachineScaleSetIdentity>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
}
/// A data disk in a scale-set VM profile. `lun` and `create_option` are
/// required; everything else is optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetDataDisk {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Logical unit number identifying the disk within the VM.
    pub lun: i32,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub caching: Option<Caching>,
    #[serde(rename = "writeAcceleratorEnabled", default, skip_serializing_if = "Option::is_none")]
    pub write_accelerator_enabled: Option<bool>,
    #[serde(rename = "createOption")]
    pub create_option: CreateOption,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "managedDisk", default, skip_serializing_if = "Option::is_none")]
    pub managed_disk: Option<VirtualMachineScaleSetManagedDiskParameters>,
    #[serde(rename = "diskIOPSReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_iops_read_write: Option<i64>,
    // Generated snake_case of "diskMBpsReadWrite"; the JSON name is authoritative.
    #[serde(rename = "diskMBpsReadWrite", default, skip_serializing_if = "Option::is_none")]
    pub disk_m_bps_read_write: Option<i64>,
}
/// An extension entry in a scale set's extension profile.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetExtension {
    #[serde(flatten)]
    pub sub_resource_read_only: SubResourceReadOnly,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // Named `type_` because `type` is a Rust keyword; serialized as `type`.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetExtensionProperties>,
}
/// Paged response for listing scale-set extensions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetExtensionListResult {
    pub value: Vec<VirtualMachineScaleSetExtension>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// The set of extensions applied to every VM in the scale set, plus an
/// overall provisioning time budget.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetExtensionProfile {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub extensions: Vec<VirtualMachineScaleSetExtension>,
    #[serde(rename = "extensionsTimeBudget", default, skip_serializing_if = "Option::is_none")]
    pub extensions_time_budget: Option<String>,
}
/// Settings of a single scale-set extension: publisher/type/version,
/// upgrade flags, public and protected (secret) settings, and ordering
/// constraints relative to other extensions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetExtensionProperties {
    #[serde(rename = "forceUpdateTag", default, skip_serializing_if = "Option::is_none")]
    pub force_update_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
    // Named `type_` because `type` is a Rust keyword; serialized as `type`.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "typeHandlerVersion", default, skip_serializing_if = "Option::is_none")]
    pub type_handler_version: Option<String>,
    #[serde(rename = "autoUpgradeMinorVersion", default, skip_serializing_if = "Option::is_none")]
    pub auto_upgrade_minor_version: Option<bool>,
    #[serde(rename = "enableAutomaticUpgrade", default, skip_serializing_if = "Option::is_none")]
    pub enable_automatic_upgrade: Option<bool>,
    /// Extension-specific public settings as free-form JSON.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub settings: Option<serde_json::Value>,
    /// Extension-specific secret settings as free-form JSON.
    #[serde(rename = "protectedSettings", default, skip_serializing_if = "Option::is_none")]
    pub protected_settings: Option<serde_json::Value>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    /// Names of extensions that must be provisioned before this one.
    #[serde(rename = "provisionAfterExtensions", default, skip_serializing_if = "Vec::is_empty")]
    pub provision_after_extensions: Vec<String>,
}
/// PATCH body for updating a scale-set extension; same shape as
/// [`VirtualMachineScaleSetExtension`] but with read-only base fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetExtensionUpdate {
    #[serde(flatten)]
    pub sub_resource_read_only: SubResourceReadOnly,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetExtensionProperties>,
}
/// An IP configuration in a scale-set network interface; `name` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetIpConfiguration {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    pub name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetIpConfigurationProperties>,
}
/// Settings of a scale-set IP configuration: subnet, public IP, address
/// version, and references to backend pools / security groups / NAT pools.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetIpConfigurationProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub subnet: Option<ApiEntityReference>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub primary: Option<bool>,
    #[serde(rename = "publicIPAddressConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub public_ip_address_configuration: Option<VirtualMachineScaleSetPublicIpAddressConfiguration>,
    #[serde(rename = "privateIPAddressVersion", default, skip_serializing_if = "Option::is_none")]
    pub private_ip_address_version: Option<virtual_machine_scale_set_ip_configuration_properties::PrivateIpAddressVersion>,
    #[serde(rename = "applicationGatewayBackendAddressPools", default, skip_serializing_if = "Vec::is_empty")]
    pub application_gateway_backend_address_pools: Vec<SubResource>,
    #[serde(rename = "applicationSecurityGroups", default, skip_serializing_if = "Vec::is_empty")]
    pub application_security_groups: Vec<SubResource>,
    #[serde(rename = "loadBalancerBackendAddressPools", default, skip_serializing_if = "Vec::is_empty")]
    pub load_balancer_backend_address_pools: Vec<SubResource>,
    #[serde(rename = "loadBalancerInboundNatPools", default, skip_serializing_if = "Vec::is_empty")]
    pub load_balancer_inbound_nat_pools: Vec<SubResource>,
}
/// Companion enums for [`VirtualMachineScaleSetIpConfigurationProperties`].
pub mod virtual_machine_scale_set_ip_configuration_properties {
    use super::*;
    /// IP address version; serialized as "IPv4" / "IPv6".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PrivateIpAddressVersion {
        IPv4,
        IPv6,
    }
}
/// Managed-service identity attached to a virtual machine scale set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetIdentity {
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    // Named `type_` because `type` is a Rust keyword; serialized as `type`.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<virtual_machine_scale_set_identity::Type>,
    #[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")]
    pub user_assigned_identities: Option<serde_json::Value>,
}
/// Companion enums for [`VirtualMachineScaleSetIdentity`].
pub mod virtual_machine_scale_set_identity {
    use super::*;
    /// Identity kind; the combined variant serializes as the literal
    /// string `"SystemAssigned, UserAssigned"`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        SystemAssigned,
        UserAssigned,
        #[serde(rename = "SystemAssigned, UserAssigned")]
        SystemAssignedUserAssigned,
        None,
    }
}
/// Instance view of a scale set: aggregated VM status counts, extension
/// summaries, overall statuses, and orchestration-service summaries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetInstanceView {
    #[serde(rename = "virtualMachine", default, skip_serializing_if = "Option::is_none")]
    pub virtual_machine: Option<VirtualMachineScaleSetInstanceViewStatusesSummary>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub extensions: Vec<VirtualMachineScaleSetVmExtensionsSummary>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<InstanceViewStatus>,
    #[serde(rename = "orchestrationServices", default, skip_serializing_if = "Vec::is_empty")]
    pub orchestration_services: Vec<OrchestrationServiceSummary>,
}
/// Per-status-code counts of the VMs in a scale set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetInstanceViewStatusesSummary {
    #[serde(rename = "statusesSummary", default, skip_serializing_if = "Vec::is_empty")]
    pub statuses_summary: Vec<VirtualMachineStatusCodeCount>,
}
/// A single IP tag (type + value) attached to a scale-set public IP.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetIpTag {
    #[serde(rename = "ipTagType", default, skip_serializing_if = "Option::is_none")]
    pub ip_tag_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tag: Option<String>,
}
/// Paged response for listing a scale set's OS upgrade history.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetListOsUpgradeHistory {
    pub value: Vec<UpgradeOperationHistoricalStatusInfo>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Paged response for listing scale sets.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetListResult {
    pub value: Vec<VirtualMachineScaleSet>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Paged response for listing the SKUs available to a scale set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetListSkusResult {
    pub value: Vec<VirtualMachineScaleSetSku>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Paged response for listing scale sets; distinct REST shape from
/// [`VirtualMachineScaleSetListResult`] but structurally identical here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetListWithLinkResult {
    pub value: Vec<VirtualMachineScaleSet>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Managed-disk settings for a scale-set disk: storage account type and an
/// optional customer-managed disk encryption set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetManagedDiskParameters {
    #[serde(rename = "storageAccountType", default, skip_serializing_if = "Option::is_none")]
    pub storage_account_type: Option<StorageAccountType>,
    #[serde(rename = "diskEncryptionSet", default, skip_serializing_if = "Option::is_none")]
    pub disk_encryption_set: Option<DiskEncryptionSetParameters>,
}
/// A network interface configuration in a scale-set VM profile; `name` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetNetworkConfiguration {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    pub name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetNetworkConfigurationProperties>,
}
/// DNS server list for a scale-set network interface.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetNetworkConfigurationDnsSettings {
    #[serde(rename = "dnsServers", default, skip_serializing_if = "Vec::is_empty")]
    pub dns_servers: Vec<String>,
}
/// Settings of a scale-set network interface. Note `ip_configurations` is
/// required (no `skip_serializing_if`), unlike the update variant.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetNetworkConfigurationProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub primary: Option<bool>,
    #[serde(rename = "enableAcceleratedNetworking", default, skip_serializing_if = "Option::is_none")]
    pub enable_accelerated_networking: Option<bool>,
    #[serde(rename = "enableFpga", default, skip_serializing_if = "Option::is_none")]
    pub enable_fpga: Option<bool>,
    #[serde(rename = "networkSecurityGroup", default, skip_serializing_if = "Option::is_none")]
    pub network_security_group: Option<SubResource>,
    #[serde(rename = "dnsSettings", default, skip_serializing_if = "Option::is_none")]
    pub dns_settings: Option<VirtualMachineScaleSetNetworkConfigurationDnsSettings>,
    #[serde(rename = "ipConfigurations")]
    pub ip_configurations: Vec<VirtualMachineScaleSetIpConfiguration>,
    #[serde(rename = "enableIPForwarding", default, skip_serializing_if = "Option::is_none")]
    pub enable_ip_forwarding: Option<bool>,
}
/// Network profile of a scale set: health probe plus NIC configurations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetNetworkProfile {
    #[serde(rename = "healthProbe", default, skip_serializing_if = "Option::is_none")]
    pub health_probe: Option<ApiEntityReference>,
    #[serde(rename = "networkInterfaceConfigurations", default, skip_serializing_if = "Vec::is_empty")]
    pub network_interface_configurations: Vec<VirtualMachineScaleSetNetworkConfiguration>,
}
/// OS-disk settings for a scale-set VM profile; only `create_option` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetOsDisk {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub caching: Option<Caching>,
    #[serde(rename = "writeAcceleratorEnabled", default, skip_serializing_if = "Option::is_none")]
    pub write_accelerator_enabled: Option<bool>,
    #[serde(rename = "createOption")]
    pub create_option: CreateOption,
    #[serde(rename = "diffDiskSettings", default, skip_serializing_if = "Option::is_none")]
    pub diff_disk_settings: Option<DiffDiskSettings>,
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(rename = "osType", default, skip_serializing_if = "Option::is_none")]
    pub os_type: Option<virtual_machine_scale_set_os_disk::OsType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub image: Option<VirtualHardDisk>,
    #[serde(rename = "vhdContainers", default, skip_serializing_if = "Vec::is_empty")]
    pub vhd_containers: Vec<String>,
    #[serde(rename = "managedDisk", default, skip_serializing_if = "Option::is_none")]
    pub managed_disk: Option<VirtualMachineScaleSetManagedDiskParameters>,
}
/// Companion enums for [`VirtualMachineScaleSetOsDisk`].
pub mod virtual_machine_scale_set_os_disk {
    use super::*;
    /// Operating system family of the disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OsType {
        Windows,
        Linux,
    }
}
/// OS profile applied to every VM in a scale set: computer-name prefix,
/// admin credentials, custom data, per-OS configuration, and vault secrets.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetOsProfile {
    #[serde(rename = "computerNamePrefix", default, skip_serializing_if = "Option::is_none")]
    pub computer_name_prefix: Option<String>,
    #[serde(rename = "adminUsername", default, skip_serializing_if = "Option::is_none")]
    pub admin_username: Option<String>,
    #[serde(rename = "adminPassword", default, skip_serializing_if = "Option::is_none")]
    pub admin_password: Option<String>,
    #[serde(rename = "customData", default, skip_serializing_if = "Option::is_none")]
    pub custom_data: Option<String>,
    #[serde(rename = "windowsConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub windows_configuration: Option<WindowsConfiguration>,
    #[serde(rename = "linuxConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub linux_configuration: Option<LinuxConfiguration>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub secrets: Vec<VaultSecretGroup>,
}
/// The `properties` payload of a scale set: upgrade/repair policies, the VM
/// profile, overprovisioning and placement settings, and read-back state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetProperties {
    #[serde(rename = "upgradePolicy", default, skip_serializing_if = "Option::is_none")]
    pub upgrade_policy: Option<UpgradePolicy>,
    #[serde(rename = "automaticRepairsPolicy", default, skip_serializing_if = "Option::is_none")]
    pub automatic_repairs_policy: Option<AutomaticRepairsPolicy>,
    #[serde(rename = "virtualMachineProfile", default, skip_serializing_if = "Option::is_none")]
    pub virtual_machine_profile: Option<VirtualMachineScaleSetVmProfile>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub overprovision: Option<bool>,
    // Attribute split across lines only because the JSON name is long; same
    // semantics as the single-line form used elsewhere.
    #[serde(
        rename = "doNotRunExtensionsOnOverprovisionedVMs",
        default,
        skip_serializing_if = "Option::is_none"
    )]
    pub do_not_run_extensions_on_overprovisioned_v_ms: Option<bool>,
    #[serde(rename = "uniqueId", default, skip_serializing_if = "Option::is_none")]
    pub unique_id: Option<String>,
    #[serde(rename = "singlePlacementGroup", default, skip_serializing_if = "Option::is_none")]
    pub single_placement_group: Option<bool>,
    #[serde(rename = "zoneBalance", default, skip_serializing_if = "Option::is_none")]
    pub zone_balance: Option<bool>,
    #[serde(rename = "platformFaultDomainCount", default, skip_serializing_if = "Option::is_none")]
    pub platform_fault_domain_count: Option<i32>,
    #[serde(rename = "proximityPlacementGroup", default, skip_serializing_if = "Option::is_none")]
    pub proximity_placement_group: Option<SubResource>,
    #[serde(rename = "hostGroup", default, skip_serializing_if = "Option::is_none")]
    pub host_group: Option<SubResource>,
    #[serde(rename = "additionalCapabilities", default, skip_serializing_if = "Option::is_none")]
    pub additional_capabilities: Option<AdditionalCapabilities>,
    #[serde(rename = "scaleInPolicy", default, skip_serializing_if = "Option::is_none")]
    pub scale_in_policy: Option<ScaleInPolicy>,
}
/// Public IP address configuration of a scale-set IP configuration; `name` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetPublicIpAddressConfiguration {
    pub name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetPublicIpAddressConfigurationProperties>,
}
/// DNS settings for a scale-set public IP; the domain-name label is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetPublicIpAddressConfigurationDnsSettings {
    #[serde(rename = "domainNameLabel")]
    pub domain_name_label: String,
}
/// Settings of a scale-set public IP: idle timeout, DNS, IP tags, an optional
/// public IP prefix, and the address version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetPublicIpAddressConfigurationProperties {
    #[serde(rename = "idleTimeoutInMinutes", default, skip_serializing_if = "Option::is_none")]
    pub idle_timeout_in_minutes: Option<i32>,
    #[serde(rename = "dnsSettings", default, skip_serializing_if = "Option::is_none")]
    pub dns_settings: Option<VirtualMachineScaleSetPublicIpAddressConfigurationDnsSettings>,
    #[serde(rename = "ipTags", default, skip_serializing_if = "Vec::is_empty")]
    pub ip_tags: Vec<VirtualMachineScaleSetIpTag>,
    #[serde(rename = "publicIPPrefix", default, skip_serializing_if = "Option::is_none")]
    pub public_ip_prefix: Option<SubResource>,
    #[serde(rename = "publicIPAddressVersion", default, skip_serializing_if = "Option::is_none")]
    pub public_ip_address_version: Option<virtual_machine_scale_set_public_ip_address_configuration_properties::PublicIpAddressVersion>,
}
/// Companion enums for [`VirtualMachineScaleSetPublicIpAddressConfigurationProperties`].
pub mod virtual_machine_scale_set_public_ip_address_configuration_properties {
    use super::*;
    /// IP address version; serialized as "IPv4" / "IPv6".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PublicIpAddressVersion {
        IPv4,
        IPv6,
    }
}
/// Reimage request for a scale set: the per-VM reimage parameters (flattened)
/// plus the list of instance ids to reimage.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetReimageParameters {
    #[serde(flatten)]
    pub virtual_machine_scale_set_vm_reimage_parameters: VirtualMachineScaleSetVmReimageParameters,
    #[serde(rename = "instanceIds", default, skip_serializing_if = "Vec::is_empty")]
    pub instance_ids: Vec<String>,
}
/// A SKU available to a scale set, with its scaling capacity information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetSku {
    #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub capacity: Option<VirtualMachineScaleSetSkuCapacity>,
}
/// Capacity bounds and default for a scale-set SKU.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetSkuCapacity {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub minimum: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub maximum: Option<i64>,
    #[serde(rename = "defaultCapacity", default, skip_serializing_if = "Option::is_none")]
    pub default_capacity: Option<i64>,
    #[serde(rename = "scaleType", default, skip_serializing_if = "Option::is_none")]
    pub scale_type: Option<virtual_machine_scale_set_sku_capacity::ScaleType>,
}
/// Companion enums for [`VirtualMachineScaleSetSkuCapacity`].
pub mod virtual_machine_scale_set_sku_capacity {
    use super::*;
    /// Whether the SKU supports automatic scaling.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ScaleType {
        Automatic,
        None,
    }
}
/// Storage profile of a scale-set VM profile: image reference, OS disk, data disks.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetStorageProfile {
    #[serde(rename = "imageReference", default, skip_serializing_if = "Option::is_none")]
    pub image_reference: Option<ImageReference>,
    #[serde(rename = "osDisk", default, skip_serializing_if = "Option::is_none")]
    pub os_disk: Option<VirtualMachineScaleSetOsDisk>,
    #[serde(rename = "dataDisks", default, skip_serializing_if = "Vec::is_empty")]
    pub data_disks: Vec<VirtualMachineScaleSetDataDisk>,
}
/// PATCH body for updating a scale set; mirrors [`VirtualMachineScaleSet`]
/// but with update-resource base fields and no zones.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdate {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub plan: Option<Plan>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetUpdateProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<VirtualMachineScaleSetIdentity>,
}
/// Update variant of a scale-set IP configuration; `name` is optional here,
/// unlike the create-time [`VirtualMachineScaleSetIpConfiguration`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateIpConfiguration {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetUpdateIpConfigurationProperties>,
}
/// Update variant of the IP-configuration settings; same fields as the
/// create-time properties, with the update-specific public IP type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateIpConfigurationProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub subnet: Option<ApiEntityReference>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub primary: Option<bool>,
    #[serde(rename = "publicIPAddressConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub public_ip_address_configuration: Option<VirtualMachineScaleSetUpdatePublicIpAddressConfiguration>,
    #[serde(rename = "privateIPAddressVersion", default, skip_serializing_if = "Option::is_none")]
    pub private_ip_address_version: Option<virtual_machine_scale_set_update_ip_configuration_properties::PrivateIpAddressVersion>,
    #[serde(rename = "applicationGatewayBackendAddressPools", default, skip_serializing_if = "Vec::is_empty")]
    pub application_gateway_backend_address_pools: Vec<SubResource>,
    #[serde(rename = "applicationSecurityGroups", default, skip_serializing_if = "Vec::is_empty")]
    pub application_security_groups: Vec<SubResource>,
    #[serde(rename = "loadBalancerBackendAddressPools", default, skip_serializing_if = "Vec::is_empty")]
    pub load_balancer_backend_address_pools: Vec<SubResource>,
    #[serde(rename = "loadBalancerInboundNatPools", default, skip_serializing_if = "Vec::is_empty")]
    pub load_balancer_inbound_nat_pools: Vec<SubResource>,
}
/// Companion enums for [`VirtualMachineScaleSetUpdateIpConfigurationProperties`].
pub mod virtual_machine_scale_set_update_ip_configuration_properties {
    use super::*;
    /// IP address version; serialized as "IPv4" / "IPv6".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PrivateIpAddressVersion {
        IPv4,
        IPv6,
    }
}
/// Update-variant of a scale set network interface configuration.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateNetworkConfiguration {
    #[serde(flatten)]
    pub sub_resource: SubResource,
    /// Network configuration name; omitted from JSON when `None`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetUpdateNetworkConfigurationProperties>,
}
/// Updatable NIC properties (acceleration, FPGA, NSG, DNS, IP configurations).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateNetworkConfigurationProperties {
    /// Whether this NIC is the primary interface of the VM.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub primary: Option<bool>,
    #[serde(rename = "enableAcceleratedNetworking", default, skip_serializing_if = "Option::is_none")]
    pub enable_accelerated_networking: Option<bool>,
    #[serde(rename = "enableFpga", default, skip_serializing_if = "Option::is_none")]
    pub enable_fpga: Option<bool>,
    #[serde(rename = "networkSecurityGroup", default, skip_serializing_if = "Option::is_none")]
    pub network_security_group: Option<SubResource>,
    #[serde(rename = "dnsSettings", default, skip_serializing_if = "Option::is_none")]
    pub dns_settings: Option<VirtualMachineScaleSetNetworkConfigurationDnsSettings>,
    #[serde(rename = "ipConfigurations", default, skip_serializing_if = "Vec::is_empty")]
    pub ip_configurations: Vec<VirtualMachineScaleSetUpdateIpConfiguration>,
    #[serde(rename = "enableIPForwarding", default, skip_serializing_if = "Option::is_none")]
    pub enable_ip_forwarding: Option<bool>,
}
/// Update-variant of a scale set network profile: health probe plus the NIC list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateNetworkProfile {
    #[serde(rename = "healthProbe", default, skip_serializing_if = "Option::is_none")]
    pub health_probe: Option<ApiEntityReference>,
    #[serde(rename = "networkInterfaceConfigurations", default, skip_serializing_if = "Vec::is_empty")]
    pub network_interface_configurations: Vec<VirtualMachineScaleSetUpdateNetworkConfiguration>,
}
/// Update-variant of a scale set OS disk (caching, size, image/VHD source,
/// managed-disk parameters).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateOsDisk {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub caching: Option<Caching>,
    #[serde(rename = "writeAcceleratorEnabled", default, skip_serializing_if = "Option::is_none")]
    pub write_accelerator_enabled: Option<bool>,
    /// Disk size in gigabytes (wire name `diskSizeGB`).
    #[serde(rename = "diskSizeGB", default, skip_serializing_if = "Option::is_none")]
    pub disk_size_gb: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub image: Option<VirtualHardDisk>,
    #[serde(rename = "vhdContainers", default, skip_serializing_if = "Vec::is_empty")]
    pub vhd_containers: Vec<String>,
    #[serde(rename = "managedDisk", default, skip_serializing_if = "Option::is_none")]
    pub managed_disk: Option<VirtualMachineScaleSetManagedDiskParameters>,
}
/// Update-variant of a scale set OS profile (custom data, per-OS configuration,
/// vault secrets).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateOsProfile {
    #[serde(rename = "customData", default, skip_serializing_if = "Option::is_none")]
    pub custom_data: Option<String>,
    #[serde(rename = "windowsConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub windows_configuration: Option<WindowsConfiguration>,
    #[serde(rename = "linuxConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub linux_configuration: Option<LinuxConfiguration>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub secrets: Vec<VaultSecretGroup>,
}
/// Updatable top-level properties of a Virtual Machine Scale Set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateProperties {
    #[serde(rename = "upgradePolicy", default, skip_serializing_if = "Option::is_none")]
    pub upgrade_policy: Option<UpgradePolicy>,
    #[serde(rename = "automaticRepairsPolicy", default, skip_serializing_if = "Option::is_none")]
    pub automatic_repairs_policy: Option<AutomaticRepairsPolicy>,
    #[serde(rename = "virtualMachineProfile", default, skip_serializing_if = "Option::is_none")]
    pub virtual_machine_profile: Option<VirtualMachineScaleSetUpdateVmProfile>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub overprovision: Option<bool>,
    // Wire name contains "VMs"; the generator split it into `v_ms`.
    #[serde(
        rename = "doNotRunExtensionsOnOverprovisionedVMs",
        default,
        skip_serializing_if = "Option::is_none"
    )]
    pub do_not_run_extensions_on_overprovisioned_v_ms: Option<bool>,
    #[serde(rename = "singlePlacementGroup", default, skip_serializing_if = "Option::is_none")]
    pub single_placement_group: Option<bool>,
    #[serde(rename = "additionalCapabilities", default, skip_serializing_if = "Option::is_none")]
    pub additional_capabilities: Option<AdditionalCapabilities>,
    #[serde(rename = "scaleInPolicy", default, skip_serializing_if = "Option::is_none")]
    pub scale_in_policy: Option<ScaleInPolicy>,
    #[serde(rename = "proximityPlacementGroup", default, skip_serializing_if = "Option::is_none")]
    pub proximity_placement_group: Option<SubResource>,
}
/// Update-variant of a scale set public IP address configuration.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdatePublicIpAddressConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetUpdatePublicIpAddressConfigurationProperties>,
}
/// Updatable public IP properties: idle timeout and DNS settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdatePublicIpAddressConfigurationProperties {
    /// Idle timeout in minutes (wire name `idleTimeoutInMinutes`).
    #[serde(rename = "idleTimeoutInMinutes", default, skip_serializing_if = "Option::is_none")]
    pub idle_timeout_in_minutes: Option<i32>,
    #[serde(rename = "dnsSettings", default, skip_serializing_if = "Option::is_none")]
    pub dns_settings: Option<VirtualMachineScaleSetPublicIpAddressConfigurationDnsSettings>,
}
/// Update-variant of a scale set storage profile (image reference, OS disk,
/// data disks).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateStorageProfile {
    #[serde(rename = "imageReference", default, skip_serializing_if = "Option::is_none")]
    pub image_reference: Option<ImageReference>,
    #[serde(rename = "osDisk", default, skip_serializing_if = "Option::is_none")]
    pub os_disk: Option<VirtualMachineScaleSetUpdateOsDisk>,
    #[serde(rename = "dataDisks", default, skip_serializing_if = "Vec::is_empty")]
    pub data_disks: Vec<VirtualMachineScaleSetDataDisk>,
}
/// Update-variant of the per-VM profile inside a scale set update payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetUpdateVmProfile {
    #[serde(rename = "osProfile", default, skip_serializing_if = "Option::is_none")]
    pub os_profile: Option<VirtualMachineScaleSetUpdateOsProfile>,
    #[serde(rename = "storageProfile", default, skip_serializing_if = "Option::is_none")]
    pub storage_profile: Option<VirtualMachineScaleSetUpdateStorageProfile>,
    #[serde(rename = "networkProfile", default, skip_serializing_if = "Option::is_none")]
    pub network_profile: Option<VirtualMachineScaleSetUpdateNetworkProfile>,
    #[serde(rename = "securityProfile", default, skip_serializing_if = "Option::is_none")]
    pub security_profile: Option<SecurityProfile>,
    #[serde(rename = "diagnosticsProfile", default, skip_serializing_if = "Option::is_none")]
    pub diagnostics_profile: Option<DiagnosticsProfile>,
    #[serde(rename = "extensionProfile", default, skip_serializing_if = "Option::is_none")]
    pub extension_profile: Option<VirtualMachineScaleSetExtensionProfile>,
    #[serde(rename = "licenseType", default, skip_serializing_if = "Option::is_none")]
    pub license_type: Option<String>,
    #[serde(rename = "billingProfile", default, skip_serializing_if = "Option::is_none")]
    pub billing_profile: Option<BillingProfile>,
    #[serde(rename = "scheduledEventsProfile", default, skip_serializing_if = "Option::is_none")]
    pub scheduled_events_profile: Option<ScheduledEventsProfile>,
}
/// A single VM instance inside a Virtual Machine Scale Set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVm {
    #[serde(flatten)]
    pub resource: Resource,
    /// Instance identifier within the scale set (wire name `instanceId`).
    #[serde(rename = "instanceId", default, skip_serializing_if = "Option::is_none")]
    pub instance_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineScaleSetVmProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub plan: Option<Plan>,
    /// Child extension resources attached to this instance.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub resources: Vec<VirtualMachineExtension>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
}
/// An extension attached to a scale set VM instance.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmExtension {
    #[serde(flatten)]
    pub sub_resource_read_only: SubResourceReadOnly,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the trailing underscore plus rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineExtensionProperties>,
}
/// Update payload for an extension on a scale set VM instance.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmExtensionUpdate {
    #[serde(flatten)]
    pub sub_resource_read_only: SubResourceReadOnly,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the trailing underscore plus rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineExtensionUpdateProperties>,
}
/// List-response wrapper for scale set VM extensions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmExtensionsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<VirtualMachineScaleSetVmExtension>,
}
/// Per-extension rollup of status codes across instances.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmExtensionsSummary {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "statusesSummary", default, skip_serializing_if = "Vec::is_empty")]
    pub statuses_summary: Vec<VirtualMachineStatusCodeCount>,
}
/// Optional list of instance ids targeted by an operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmInstanceIDs {
    #[serde(rename = "instanceIds", default, skip_serializing_if = "Vec::is_empty")]
    pub instance_ids: Vec<String>,
}
/// Required (always-serialized) list of instance ids — note: no `default`
/// or `skip_serializing_if`, unlike `VirtualMachineScaleSetVmInstanceIDs`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmInstanceRequiredIDs {
    #[serde(rename = "instanceIds")]
    pub instance_ids: Vec<String>,
}
/// Runtime instance view of a scale set VM: placement info, agent/extension
/// status, disk and health details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmInstanceView {
    #[serde(rename = "platformUpdateDomain", default, skip_serializing_if = "Option::is_none")]
    pub platform_update_domain: Option<i32>,
    #[serde(rename = "platformFaultDomain", default, skip_serializing_if = "Option::is_none")]
    pub platform_fault_domain: Option<i32>,
    #[serde(rename = "rdpThumbPrint", default, skip_serializing_if = "Option::is_none")]
    pub rdp_thumb_print: Option<String>,
    #[serde(rename = "vmAgent", default, skip_serializing_if = "Option::is_none")]
    pub vm_agent: Option<VirtualMachineAgentInstanceView>,
    #[serde(rename = "maintenanceRedeployStatus", default, skip_serializing_if = "Option::is_none")]
    pub maintenance_redeploy_status: Option<MaintenanceRedeployStatus>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub disks: Vec<DiskInstanceView>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub extensions: Vec<VirtualMachineExtensionInstanceView>,
    #[serde(rename = "vmHealth", default, skip_serializing_if = "Option::is_none")]
    pub vm_health: Option<VirtualMachineHealthStatus>,
    #[serde(rename = "bootDiagnostics", default, skip_serializing_if = "Option::is_none")]
    pub boot_diagnostics: Option<BootDiagnosticsInstanceView>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub statuses: Vec<InstanceViewStatus>,
    #[serde(rename = "assignedHost", default, skip_serializing_if = "Option::is_none")]
    pub assigned_host: Option<String>,
    #[serde(rename = "placementGroupId", default, skip_serializing_if = "Option::is_none")]
    pub placement_group_id: Option<String>,
}
/// Paged list of scale set VMs; `next_link` carries the follow-up page URL.
/// `value` is required and always serialized, unlike most list wrappers here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmListResult {
    pub value: Vec<VirtualMachineScaleSetVm>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Network profile configuration attached to a single scale set VM.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmNetworkProfileConfiguration {
    #[serde(rename = "networkInterfaceConfigurations", default, skip_serializing_if = "Vec::is_empty")]
    pub network_interface_configurations: Vec<VirtualMachineScaleSetNetworkConfiguration>,
}
/// Full (non-update) per-VM profile used when creating a scale set.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmProfile {
    #[serde(rename = "osProfile", default, skip_serializing_if = "Option::is_none")]
    pub os_profile: Option<VirtualMachineScaleSetOsProfile>,
    #[serde(rename = "storageProfile", default, skip_serializing_if = "Option::is_none")]
    pub storage_profile: Option<VirtualMachineScaleSetStorageProfile>,
    #[serde(rename = "networkProfile", default, skip_serializing_if = "Option::is_none")]
    pub network_profile: Option<VirtualMachineScaleSetNetworkProfile>,
    #[serde(rename = "securityProfile", default, skip_serializing_if = "Option::is_none")]
    pub security_profile: Option<SecurityProfile>,
    #[serde(rename = "diagnosticsProfile", default, skip_serializing_if = "Option::is_none")]
    pub diagnostics_profile: Option<DiagnosticsProfile>,
    #[serde(rename = "extensionProfile", default, skip_serializing_if = "Option::is_none")]
    pub extension_profile: Option<VirtualMachineScaleSetExtensionProfile>,
    #[serde(rename = "licenseType", default, skip_serializing_if = "Option::is_none")]
    pub license_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub priority: Option<Priority>,
    #[serde(rename = "evictionPolicy", default, skip_serializing_if = "Option::is_none")]
    pub eviction_policy: Option<EvictionPolicy>,
    #[serde(rename = "billingProfile", default, skip_serializing_if = "Option::is_none")]
    pub billing_profile: Option<BillingProfile>,
    #[serde(rename = "scheduledEventsProfile", default, skip_serializing_if = "Option::is_none")]
    pub scheduled_events_profile: Option<ScheduledEventsProfile>,
}
/// Properties of a single scale set VM instance (model state, profiles,
/// provisioning and protection info).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmProperties {
    /// Whether the instance runs the latest scale set model.
    #[serde(rename = "latestModelApplied", default, skip_serializing_if = "Option::is_none")]
    pub latest_model_applied: Option<bool>,
    #[serde(rename = "vmId", default, skip_serializing_if = "Option::is_none")]
    pub vm_id: Option<String>,
    #[serde(rename = "instanceView", default, skip_serializing_if = "Option::is_none")]
    pub instance_view: Option<VirtualMachineScaleSetVmInstanceView>,
    #[serde(rename = "hardwareProfile", default, skip_serializing_if = "Option::is_none")]
    pub hardware_profile: Option<HardwareProfile>,
    #[serde(rename = "storageProfile", default, skip_serializing_if = "Option::is_none")]
    pub storage_profile: Option<StorageProfile>,
    #[serde(rename = "additionalCapabilities", default, skip_serializing_if = "Option::is_none")]
    pub additional_capabilities: Option<AdditionalCapabilities>,
    #[serde(rename = "osProfile", default, skip_serializing_if = "Option::is_none")]
    pub os_profile: Option<OsProfile>,
    #[serde(rename = "securityProfile", default, skip_serializing_if = "Option::is_none")]
    pub security_profile: Option<SecurityProfile>,
    #[serde(rename = "networkProfile", default, skip_serializing_if = "Option::is_none")]
    pub network_profile: Option<NetworkProfile>,
    #[serde(rename = "networkProfileConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub network_profile_configuration: Option<VirtualMachineScaleSetVmNetworkProfileConfiguration>,
    #[serde(rename = "diagnosticsProfile", default, skip_serializing_if = "Option::is_none")]
    pub diagnostics_profile: Option<DiagnosticsProfile>,
    #[serde(rename = "availabilitySet", default, skip_serializing_if = "Option::is_none")]
    pub availability_set: Option<SubResource>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "licenseType", default, skip_serializing_if = "Option::is_none")]
    pub license_type: Option<String>,
    #[serde(rename = "modelDefinitionApplied", default, skip_serializing_if = "Option::is_none")]
    pub model_definition_applied: Option<String>,
    #[serde(rename = "protectionPolicy", default, skip_serializing_if = "Option::is_none")]
    pub protection_policy: Option<VirtualMachineScaleSetVmProtectionPolicy>,
}
/// Per-instance protection flags against scale-in and scale set actions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmProtectionPolicy {
    #[serde(rename = "protectFromScaleIn", default, skip_serializing_if = "Option::is_none")]
    pub protect_from_scale_in: Option<bool>,
    #[serde(rename = "protectFromScaleSetActions", default, skip_serializing_if = "Option::is_none")]
    pub protect_from_scale_set_actions: Option<bool>,
}
/// Reimage parameters for a scale set VM; currently just flattens the
/// shared `VirtualMachineReimageParameters` fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineScaleSetVmReimageParameters {
    #[serde(flatten)]
    pub virtual_machine_reimage_parameters: VirtualMachineReimageParameters,
}
/// Describes an available VM size (cores, disk sizes, memory, disk count).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineSize {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "numberOfCores", default, skip_serializing_if = "Option::is_none")]
    pub number_of_cores: Option<i32>,
    /// OS disk size in megabytes (wire name `osDiskSizeInMB`).
    #[serde(rename = "osDiskSizeInMB", default, skip_serializing_if = "Option::is_none")]
    pub os_disk_size_in_mb: Option<i32>,
    #[serde(rename = "resourceDiskSizeInMB", default, skip_serializing_if = "Option::is_none")]
    pub resource_disk_size_in_mb: Option<i32>,
    #[serde(rename = "memoryInMB", default, skip_serializing_if = "Option::is_none")]
    pub memory_in_mb: Option<i32>,
    #[serde(rename = "maxDataDiskCount", default, skip_serializing_if = "Option::is_none")]
    pub max_data_disk_count: Option<i32>,
}
/// List-response wrapper for `VirtualMachineSize`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineSizeListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<VirtualMachineSize>,
}
/// Metadata about a single available/installed software patch.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineSoftwarePatchProperties {
    #[serde(rename = "patchId", default, skip_serializing_if = "Option::is_none")]
    pub patch_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    /// Knowledge Base article id (wire name `kbid`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kbid: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub classifications: Vec<String>,
    #[serde(rename = "rebootBehavior", default, skip_serializing_if = "Option::is_none")]
    pub reboot_behavior: Option<virtual_machine_software_patch_properties::RebootBehavior>,
    #[serde(rename = "activityId", default, skip_serializing_if = "Option::is_none")]
    pub activity_id: Option<String>,
    #[serde(rename = "publishedDate", default, skip_serializing_if = "Option::is_none")]
    pub published_date: Option<String>,
    #[serde(rename = "lastModifiedDateTime", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_date_time: Option<String>,
    #[serde(rename = "assessmentState", default, skip_serializing_if = "Option::is_none")]
    pub assessment_state: Option<virtual_machine_software_patch_properties::AssessmentState>,
}
/// Companion module for enums nested under `VirtualMachineSoftwarePatchProperties`.
pub mod virtual_machine_software_patch_properties {
    use super::*;
    /// Reboot requirement of a patch; serialized as the literal variant name.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum RebootBehavior {
        NeverReboots,
        AlwaysRequiresReboot,
        CanRequestReboot,
    }
    /// Assessment status of a patch; serialized as the literal variant name.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AssessmentState {
        Installed,
        Failed,
        Excluded,
        NotSelected,
        Pending,
        Available,
    }
}
/// A (status code, count) pair used in status summaries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineStatusCodeCount {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub count: Option<i32>,
}
/// Update (PATCH) payload for a virtual machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualMachineUpdate {
    #[serde(flatten)]
    pub update_resource: UpdateResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub plan: Option<Plan>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VirtualMachineProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<VirtualMachineIdentity>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
}
/// Windows Remote Management configuration: the set of WinRM listeners.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WinRmConfiguration {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub listeners: Vec<WinRmListener>,
}
/// A single WinRM listener: protocol plus an optional certificate URL.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WinRmListener {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub protocol: Option<win_rm_listener::Protocol>,
    #[serde(rename = "certificateUrl", default, skip_serializing_if = "Option::is_none")]
    pub certificate_url: Option<String>,
}
/// Companion module for enums nested under `WinRmListener`.
pub mod win_rm_listener {
    use super::*;
    /// Listener transport; serialized as "Http"/"Https".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Protocol {
        Http,
        Https,
    }
}
/// Windows-specific OS configuration for a VM.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WindowsConfiguration {
    #[serde(rename = "provisionVMAgent", default, skip_serializing_if = "Option::is_none")]
    pub provision_vm_agent: Option<bool>,
    #[serde(rename = "enableAutomaticUpdates", default, skip_serializing_if = "Option::is_none")]
    pub enable_automatic_updates: Option<bool>,
    #[serde(rename = "timeZone", default, skip_serializing_if = "Option::is_none")]
    pub time_zone: Option<String>,
    #[serde(rename = "additionalUnattendContent", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_unattend_content: Vec<AdditionalUnattendContent>,
    #[serde(rename = "patchSettings", default, skip_serializing_if = "Option::is_none")]
    pub patch_settings: Option<PatchSettings>,
    #[serde(rename = "winRM", default, skip_serializing_if = "Option::is_none")]
    pub win_rm: Option<WinRmConfiguration>,
}
/// What happens to a VM when it is evicted; serialized as the variant name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EvictionPolicy {
    Deallocate,
    Delete,
}
/// VM priority tier; serialized as the variant name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Priority {
    Regular,
    Low,
    Spot,
}
| 45.214482 | 136 | 0.716852 |
f77fcbf3965a178cd785b19324aab63c137ed113 | 536 | use std::fs;
use serde::Deserialize;
// Path of the TOML configuration file, relative to the working directory.
// `'static` is implied for consts, so the explicit lifetime was redundant
// (clippy::redundant_static_lifetimes).
const CONFIG_PATH: &str = "./config/config.toml";
/// Top-level application configuration, deserialized from the TOML file at
/// `CONFIG_PATH` by `read_config`.
#[derive(Deserialize)]
pub struct Config {
    // Discord-style numeric ids, presumably — TODO confirm against callers.
    pub application_id: u64,
    pub administrator_id: u64,
    /// Filesystem path of the database.
    pub db_path: String,
    /// Nested `[role_ids]` table.
    pub role_ids: RoleIds,
}
/// Role ids referenced by the application (the `[role_ids]` config table).
#[derive(Deserialize)]
pub struct RoleIds {
    pub twitch: u64,
}
pub fn read_config() -> Config {
fs::read(CONFIG_PATH)
.as_deref()
.map(toml::from_slice)
.expect("Error while reading config file")
.expect("Error while parsing config file")
} | 21.44 | 57 | 0.654851 |
916f237a98c42eca9b93b5bf97a25ab6718e9c70 | 2,783 | #![feature(test)]
#![deny(
missing_docs,
missing_debug_implementations,
missing_copy_implementations,
trivial_casts,
trivial_numeric_casts,
unsafe_code,
unused_import_braces,
unused_qualifications
)]
#![cfg_attr(feature = "dev", feature(plugin))]
#![cfg_attr(feature = "dev", plugin(clippy))]
//! A crate for generating large, cryptographically secure prime numbers.
//! These numbers are seeded from the operating system's main source of
//! entropy, ensuring proper randomness.
//!
//! Numbers are verified to be prime by running the following three tests
//! during initialization:
//!
//! 1. Dividing the initial prime number candidate by the first 1,000 prime
//! numbers, checking the remainder. Should the remainder ever be zero, then
//! add two to the candidate and try again.
//!
//! 2. Run a Fermat Primality Test on the candidate. If it doesn't pass, add
//! two to the candidate and goto Step 1.
//!
//! 3. Finally, complete five rounds of the Miller-Rabin Primality Test.
//! Should any of the tests pass, add two to the candidate and goto Step 1.
//!
//! The preceding steps mirror those used by GnuPG, a leading PGP implementation
//! used by thousands of users all across the world.
//!
//! The prime numbers must be AT LEAST 512 bits long. Attempting to generate a
//! number shorter than 512 bits will cause a panic.
//!
//! ## Example
//!
//! ```
//! extern crate pumpkin;
//!
//! use pumpkin::prime;
//!
//! fn main() {
//! // Generate 2, 2048-bit primes
//! let p = prime::new(2048);
//! let q = prime::new(2048);
//!
//! let n = p * q;
//! println!("{}", n); // Some 4096-bit composite number
//! }
//! ```
#[allow(unused_imports)]
#[macro_use]
extern crate custom_derive;
#[allow(unused_imports)]
#[macro_use]
extern crate newtype_derive;
extern crate ramp;
extern crate rand;
extern crate test;
mod common;
pub mod error;
pub mod prime;
pub mod safe_prime;
#[cfg(test)]
mod tests {
    use super::{prime, safe_prime};
    use rand::rngs::OsRng;
    use test::Bencher;
    // Benchmarks only — no correctness assertions. Each one measures
    // end-to-end generation time for a prime (or safe prime) of the given
    // bit length, seeding from the OS entropy source on every run.
    #[bench]
    fn bench_generate_512_bit_prime(b: &mut Bencher) {
        // NOTE(review): `OsRng::new()` is the pre-0.7 `rand` API — confirm
        // the pinned `rand` version before touching these.
        let mut rngesus = OsRng::new().unwrap();
        b.iter(|| prime::from_rng(512, &mut rngesus));
    }
    #[bench]
    fn bench_generate_1024_bit_prime(b: &mut Bencher) {
        let mut rngesus = OsRng::new().unwrap();
        b.iter(|| prime::from_rng(1024, &mut rngesus));
    }
    #[bench]
    fn bench_generate_2048_bit_prime(b: &mut Bencher) {
        let mut rngesus = OsRng::new().unwrap();
        b.iter(|| prime::from_rng(2048, &mut rngesus));
    }
    // Safe primes (p where (p-1)/2 is also prime) are far more expensive to
    // find, so only the smallest size is benchmarked.
    #[bench]
    fn bench_generate_512_bit_safe_prime(b: &mut Bencher) {
        let mut rngesus = OsRng::new().unwrap();
        b.iter(|| safe_prime::from_rng(512, &mut rngesus));
    }
}
| 27.83 | 80 | 0.65936 |
7a2ad969a1a390622c44e55062c6f09a1fa576f2 | 12,795 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// Paginator for [`ListSignalingChannels`](crate::operation::ListSignalingChannels)
pub struct ListSignalingChannelsPaginator<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
> {
    // Shared client handle (connector, middleware, retry policy, config).
    handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
    // Input builder reused to construct the request for every page.
    builder: crate::input::list_signaling_channels_input::Builder,
}
impl<C, M, R> ListSignalingChannelsPaginator<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create a new paginator-wrapper
    pub(crate) fn new(
        handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
        builder: crate::input::list_signaling_channels_input::Builder,
    ) -> Self {
        Self { handle, builder }
    }
    /// Set the page size
    ///
    /// _Note: this method will override any previously set value for `max_results`_
    pub fn page_size(mut self, limit: i32) -> Self {
        self.builder.max_results = Some(limit);
        self
    }
    /// Create a flattened paginator
    ///
    /// This paginator automatically flattens results using `channel_info_list`. Queries to the underlying service
    /// are dispatched lazily.
    pub fn items(self) -> crate::paginator::ListSignalingChannelsPaginatorItems<C, M, R> {
        crate::paginator::ListSignalingChannelsPaginatorItems(self)
    }
    /// Create the pagination stream
    ///
    /// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::output::ListSignalingChannelsOutput,
            aws_smithy_http::result::SdkError<crate::error::ListSignalingChannelsError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListSignalingChannelsInputOperationOutputAlias,
            crate::output::ListSignalingChannelsOutput,
            crate::error::ListSignalingChannelsError,
            crate::input::ListSignalingChannelsInputOperationRetryAlias,
        >,
    {
        // Move individual fields out of self for the borrow checker
        let builder = self.builder;
        let handle = self.handle;
        // FnStream drives the async block lazily; pages are pushed through `tx`
        // as the consumer polls the stream.
        aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
            Box::pin(async move {
                // Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
                let mut input = match builder.build().map_err(|err| {
                    aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
                }) {
                    Ok(input) => input,
                    Err(e) => {
                        let _ = tx.send(Err(e)).await;
                        return;
                    }
                };
                loop {
                    let op = match input.make_operation(&handle.conf).await.map_err(|err| {
                        aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
                    }) {
                        Ok(op) => op,
                        Err(e) => {
                            let _ = tx.send(Err(e)).await;
                            return;
                        }
                    };
                    let resp = handle.client.call(op).await;
                    // If the input member is None or it was an error
                    let done = match resp {
                        Ok(ref resp) => {
                            let new_token = crate::lens::reflens_structure_crate_output_list_signaling_channels_output_next_token(resp);
                            // An unchanged token would loop forever; surface it as an error and stop.
                            if new_token == input.next_token.as_ref() {
                                let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
                                return;
                            }
                            input.next_token = new_token.cloned();
                            // A missing or empty next_token marks the final page.
                            input.next_token.as_deref().unwrap_or_default().is_empty()
                        }
                        Err(_) => true,
                    };
                    if tx.send(resp).await.is_err() {
                        // receiving end was dropped
                        return;
                    }
                    if done {
                        return;
                    }
                }
            })
        })
    }
}
/// Paginator for [`ListStreams`](crate::operation::ListStreams)
pub struct ListStreamsPaginator<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
> {
    // Shared client handle (connector, middleware, retry policy, config).
    handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
    // Input builder reused to construct the request for every page.
    builder: crate::input::list_streams_input::Builder,
}
impl<C, M, R> ListStreamsPaginator<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create a new paginator-wrapper
    pub(crate) fn new(
        handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
        builder: crate::input::list_streams_input::Builder,
    ) -> Self {
        Self { handle, builder }
    }
    /// Set the page size
    ///
    /// _Note: this method will override any previously set value for `max_results`_
    pub fn page_size(mut self, limit: i32) -> Self {
        self.builder.max_results = Some(limit);
        self
    }
    /// Create a flattened paginator
    ///
    /// This paginator automatically flattens results using `stream_info_list`. Queries to the underlying service
    /// are dispatched lazily.
    pub fn items(self) -> crate::paginator::ListStreamsPaginatorItems<C, M, R> {
        crate::paginator::ListStreamsPaginatorItems(self)
    }
    /// Create the pagination stream
    ///
    /// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::output::ListStreamsOutput,
            aws_smithy_http::result::SdkError<crate::error::ListStreamsError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListStreamsInputOperationOutputAlias,
            crate::output::ListStreamsOutput,
            crate::error::ListStreamsError,
            crate::input::ListStreamsInputOperationRetryAlias,
        >,
    {
        // Move individual fields out of self for the borrow checker
        let builder = self.builder;
        let handle = self.handle;
        // FnStream drives the async block lazily; pages are pushed through `tx`
        // as the consumer polls the stream.
        aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
            Box::pin(async move {
                // Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
                let mut input = match builder.build().map_err(|err| {
                    aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
                }) {
                    Ok(input) => input,
                    Err(e) => {
                        let _ = tx.send(Err(e)).await;
                        return;
                    }
                };
                loop {
                    let op = match input.make_operation(&handle.conf).await.map_err(|err| {
                        aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
                    }) {
                        Ok(op) => op,
                        Err(e) => {
                            let _ = tx.send(Err(e)).await;
                            return;
                        }
                    };
                    let resp = handle.client.call(op).await;
                    // If the input member is None or it was an error
                    let done = match resp {
                        Ok(ref resp) => {
                            let new_token = crate::lens::reflens_structure_crate_output_list_streams_output_next_token(resp);
                            // An unchanged token would loop forever; surface it as an error and stop.
                            if new_token == input.next_token.as_ref() {
                                let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
                                return;
                            }
                            input.next_token = new_token.cloned();
                            // A missing or empty next_token marks the final page.
                            input.next_token.as_deref().unwrap_or_default().is_empty()
                        }
                        Err(_) => true,
                    };
                    if tx.send(resp).await.is_err() {
                        // receiving end was dropped
                        return;
                    }
                    if done {
                        return;
                    }
                }
            })
        })
    }
}
/// Flattened paginator for `ListSignalingChannelsPaginator`
///
/// This is created with [`.items()`](ListSignalingChannelsPaginator::items)
pub struct ListSignalingChannelsPaginatorItems<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
>(ListSignalingChannelsPaginator<C, M, R>);
impl<C, M, R> ListSignalingChannelsPaginatorItems<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create the pagination stream
    ///
    /// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
    ///
    /// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::model::ChannelInfo,
            aws_smithy_http::result::SdkError<crate::error::ListSignalingChannelsError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListSignalingChannelsInputOperationOutputAlias,
            crate::output::ListSignalingChannelsOutput,
            crate::error::ListSignalingChannelsError,
            crate::input::ListSignalingChannelsInputOperationRetryAlias,
        >,
    {
        // Lazily flatten every page produced by the inner paginator into its
        // `channel_info_list` entries; errors from any page pass through as-is.
        aws_smithy_async::future::fn_stream::TryFlatMap::new(self.0.send()).flat_map(|page| crate::lens::lens_structure_crate_output_list_signaling_channels_output_channel_info_list(page).unwrap_or_default().into_iter())
    }
}
/// Flattened paginator for `ListStreamsPaginator`
///
/// This is created with [`.items()`](ListStreamsPaginator::items)
pub struct ListStreamsPaginatorItems<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
>(ListStreamsPaginator<C, M, R>);
impl<C, M, R> ListStreamsPaginatorItems<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create the pagination stream
    ///
    /// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
    ///
    /// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::model::StreamInfo,
            aws_smithy_http::result::SdkError<crate::error::ListStreamsError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListStreamsInputOperationOutputAlias,
            crate::output::ListStreamsOutput,
            crate::error::ListStreamsError,
            crate::input::ListStreamsInputOperationRetryAlias,
        >,
    {
        // Lazily flatten every page produced by the inner paginator into its
        // `stream_info_list` entries; errors from any page pass through as-is.
        aws_smithy_async::future::fn_stream::TryFlatMap::new(self.0.send()).flat_map(|page| {
            crate::lens::lens_structure_crate_output_list_streams_output_stream_info_list(page)
                .unwrap_or_default()
                .into_iter()
        })
    }
}
| 41.677524 | 220 | 0.573818 |
146668c39a0cc49ba1dc1939400a1aabf0d6eb4c | 29,756 | #![allow(clippy::mem_discriminant_non_enum)] // these actually *are* enums...
//! Usability api for tx2 kitsune transports.
use crate::codec::*;
use crate::tx2::tx2_adapter::Uniq;
use crate::tx2::tx2_pool::*;
use crate::tx2::tx2_utils::*;
use crate::tx2::*;
use crate::*;
use futures::future::{FutureExt, TryFutureExt};
use futures::stream::Stream;
use std::collections::HashMap;
use std::sync::atomic;
/// Process-wide counter used to allocate unique request message ids.
/// Starts at 1 so that id 0 is never handed out.
static MSG_ID: atomic::AtomicU64 = atomic::AtomicU64::new(1);
/// Allocate and return the next unique message id.
fn next_msg_id() -> u64 {
    // Relaxed ordering is sufficient here: all we need is uniqueness of the
    // returned ids, not any ordering with respect to other memory operations.
    let id = MSG_ID.fetch_add(1, atomic::Ordering::Relaxed);
    id
}
// One-shot sender used to deliver a request's (decoded) response to its waiter.
type RSend<C> = tokio::sync::oneshot::Sender<KitsuneResult<C>>;
// Shared handle to the map of in-flight requests awaiting responses.
type ShareRMap<C> = Arc<Share<RMap<C>>>;
/// Bookkeeping for a single in-flight request awaiting its response.
struct RMapItem<C: Codec + 'static + Send + Unpin> {
    // channel on which the response (or error) is delivered
    sender: RSend<C>,
    // when the request was registered; used for latency metrics/logging
    start: tokio::time::Instant,
    // time remaining at registration; used to warn when nearing timeout
    timeout: std::time::Duration,
    // codec variant name of the outgoing request, for logging
    dbg_name: &'static str,
    // size of the encoded request in bytes, for logging
    req_byte_count: usize,
    local_cert: Tx2Cert,
    peer_cert: Tx2Cert,
}
/// Map of (connection uniq, msg id) -> pending request bookkeeping.
struct RMap<C: Codec + 'static + Send + Unpin>(HashMap<(Uniq, u64), RMapItem<C>>);
impl<C: Codec + 'static + Send + Unpin> RMap<C> {
    /// Create an empty pending-request map.
    pub fn new() -> Self {
        Self(HashMap::new())
    }
    /// Register a pending request keyed by (connection uniq, msg id).
    #[allow(clippy::too_many_arguments)]
    pub fn insert(
        &mut self,
        uniq: Uniq,
        timeout: KitsuneTimeout,
        msg_id: u64,
        s_res: RSend<C>,
        dbg_name: &'static str,
        req_byte_count: usize,
        local_cert: Tx2Cert,
        peer_cert: Tx2Cert,
    ) {
        // capture the remaining time now so later logging can compare
        // elapsed time against the original budget
        let timeout = timeout.time_remaining();
        self.0.insert(
            (uniq, msg_id),
            RMapItem {
                sender: s_res,
                start: tokio::time::Instant::now(),
                timeout,
                dbg_name,
                req_byte_count,
                local_cert,
                peer_cert,
            },
        );
    }
    /// Deliver a successful response to the matching pending request,
    /// recording latency metrics. Unmatched responses are only logged.
    pub fn respond(&mut self, uniq: Uniq, resp_byte_count: usize, msg_id: u64, c: C) {
        let resp_dbg_name = c.variant_type();
        if let Some(RMapItem {
            sender,
            start,
            timeout,
            dbg_name,
            req_byte_count,
            local_cert,
            peer_cert,
        }) = self.0.remove(&(uniq, msg_id))
        {
            let elapsed = start.elapsed();
            crate::metrics::metric_push_api_req_res_elapsed_ms(elapsed.as_millis() as u64);
            let elapsed_s = elapsed.as_secs_f64();
            tracing::debug!(
                %dbg_name,
                %req_byte_count,
                %resp_dbg_name,
                %resp_byte_count,
                ?local_cert,
                ?peer_cert,
                %elapsed_s,
                "(api) req success",
            );
            // warn when a response arrives late enough that timeouts loom
            if elapsed_s / timeout.as_secs_f64() > 0.75 {
                tracing::warn!(
                    %dbg_name,
                    %req_byte_count,
                    %resp_dbg_name,
                    %resp_byte_count,
                    ?local_cert,
                    ?peer_cert,
                    %elapsed_s,
                    "(api) req approaching timeout (> 75%)",
                );
            }
            // if the recv side is dropped, we no longer need to respond
            // so it's ok to ignore errors here.
            let _ = sender.send(Ok(c));
        } else {
            tracing::warn!(
                %resp_dbg_name,
                %resp_byte_count,
                "(api) req UNMATCHED RESPONSE",
            );
        }
    }
    /// Deliver an error to the matching pending request, if one exists.
    pub fn respond_err(&mut self, uniq: Uniq, msg_id: u64, err: KitsuneError) {
        if let Some(RMapItem {
            sender,
            start,
            dbg_name,
            req_byte_count,
            local_cert,
            peer_cert,
            ..
        }) = self.0.remove(&(uniq, msg_id))
        {
            let elapsed_s = start.elapsed().as_secs_f64();
            tracing::debug!(
                %dbg_name,
                %req_byte_count,
                ?local_cert,
                ?peer_cert,
                %elapsed_s,
                ?err,
                "(api) req err",
            );
            // if the recv side is dropped, we no longer need to respond
            // so it's ok to ignore errors here.
            let _ = sender.send(Err(err));
        }
    }
}
/// Cleanup our map when the request future completes
/// either by recieving the response or timing out.
struct RMapDropCleanup<C: Codec + 'static + Send + Unpin>(ShareRMap<C>, Uniq, u64);
impl<C: Codec + 'static + Send + Unpin> Drop for RMapDropCleanup<C> {
    fn drop(&mut self) {
        // If the entry is still present here, the caller dropped the request
        // future before a response arrived -- remove the entry and log it.
        let _ = self.0.share_mut(|i, _| {
            if let Some(RMapItem {
                start,
                dbg_name,
                local_cert,
                peer_cert,
                ..
            }) = i.0.remove(&(self.1, self.2))
            {
                let elapsed_s = start.elapsed().as_secs_f64();
                tracing::warn!(
                    %dbg_name,
                    ?local_cert,
                    ?peer_cert,
                    %elapsed_s,
                    "(api) req dropped",
                );
            }
            Ok(())
        });
    }
}
/// Register a pending request in the shared response map and return a guard
/// that removes the entry again when dropped (response received, timeout, or
/// the caller abandoning the request future).
#[allow(clippy::too_many_arguments)]
fn rmap_insert<C: Codec + 'static + Send + Unpin>(
    rmap: ShareRMap<C>,
    uniq: Uniq,
    timeout: KitsuneTimeout,
    msg_id: u64,
    s_res: RSend<C>,
    dbg_name: &'static str,
    req_byte_count: usize,
    local_cert: Tx2Cert,
    peer_cert: Tx2Cert,
) -> KitsuneResult<RMapDropCleanup<C>> {
    rmap.share_mut(move |map, _| {
        map.insert(
            uniq, timeout, msg_id, s_res, dbg_name, req_byte_count, local_cert, peer_cert,
        );
        Ok(())
    })?;
    // The guard owns a clone of the shared map so it can clean up on drop.
    Ok(RMapDropCleanup(rmap, uniq, msg_id))
}
/// A connection handle - use this to manage an open connection.
#[derive(Clone)]
pub struct Tx2ConHnd<C: Codec + 'static + Send + Unpin> {
    // certificate digest of our local endpoint, for logging
    local_cert: Tx2Cert,
    // the underlying pool connection handle
    con: ConHnd,
    // the remote url this connection was opened to/from
    url: TxUrl,
    // shared map of in-flight requests, for matching responses
    rmap: ShareRMap<C>,
    // metrics callbacks injected at endpoint construction
    metrics: Arc<Tx2ApiMetrics>,
}
impl<C: Codec + 'static + Send + Unpin> std::fmt::Debug for Tx2ConHnd<C> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("Tx2ConHnd").field(&self.con).finish()
    }
}
impl<C: Codec + 'static + Send + Unpin> Tx2ConHnd<C> {
    /// Construct a new handle wrapping a pool connection.
    fn new(
        local_cert: Tx2Cert,
        con: ConHnd,
        url: TxUrl,
        rmap: ShareRMap<C>,
        metrics: Arc<Tx2ApiMetrics>,
    ) -> Self {
        Self {
            local_cert,
            con,
            url,
            rmap,
            metrics,
        }
    }
}
impl<C: Codec + 'static + Send + Unpin> PartialEq for Tx2ConHnd<C> {
    fn eq(&self, oth: &Self) -> bool {
        // Connection identity is defined solely by the underlying pool Uniq.
        self.con.uniq() == oth.con.uniq()
    }
}
impl<C: Codec + 'static + Send + Unpin> Eq for Tx2ConHnd<C> {}
impl<C: Codec + 'static + Send + Unpin> std::hash::Hash for Tx2ConHnd<C> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Must agree with PartialEq: hash only the Uniq.
        self.con.uniq().hash(state);
    }
}
impl<C: Codec + 'static + Send + Unpin> Tx2ConHnd<C> {
    /// Get the opaque Uniq identifier for this connection.
    pub fn uniq(&self) -> Uniq {
        self.con.uniq()
    }
    /// Get the remote address of this connection.
    pub fn peer_addr(&self) -> KitsuneResult<TxUrl> {
        self.con.peer_addr()
    }
    /// Get the certificate digest of the remote.
    pub fn peer_cert(&self) -> Tx2Cert {
        self.con.peer_cert()
    }
    /// Is this connection closed?
    pub fn is_closed(&self) -> bool {
        self.con.is_closed()
    }
    /// Close this connection.
    pub fn close(
        &self,
        code: u32,
        reason: &str,
    ) -> impl std::future::Future<Output = ()> + 'static + Send {
        self.con.close(code, reason)
    }
    /// Write pre-encoded notify data (fire-and-forget, no response expected).
    fn priv_notify(
        &self,
        data: PoolBuf,
        timeout: KitsuneTimeout,
        dbg_name: &'static str,
    ) -> impl std::future::Future<Output = KitsuneResult<()>> + 'static + Send {
        let this = self.clone();
        async move {
            // notify messages use a dedicated msg id marking "no response"
            let msg_id = MsgId::new_notify();
            let len = data.len();
            this.con.write(msg_id, data, timeout).await?;
            this.metrics.write_len(dbg_name, len);
            let peer_cert = this.peer_cert();
            tracing::debug!(
                %dbg_name,
                req_byte_count=%len,
                local_cert=?this.local_cert,
                ?peer_cert,
                "(api) notify",
            );
            Ok(())
        }
    }
    /// Write pre-encoded request data and await the decoded response.
    fn priv_request(
        &self,
        data: PoolBuf,
        timeout: KitsuneTimeout,
        dbg_name: &'static str,
    ) -> impl std::future::Future<Output = KitsuneResult<C>> + 'static + Send {
        let this = self.clone();
        async move {
            let msg_id = next_msg_id();
            let (s_res, r_res) = tokio::sync::oneshot::channel::<KitsuneResult<C>>();
            let peer_cert = this.peer_cert();
            let len = data.len();
            // insert our response receive handler
            // Cleanup our map when this future completes
            // either by recieving the response or timing out.
            let _drop_cleanup = rmap_insert(
                this.rmap.clone(),
                this.con.uniq(),
                timeout,
                msg_id,
                s_res,
                dbg_name,
                len,
                this.local_cert.clone(),
                peer_cert,
            )?;
            this.con
                .write(MsgId::new(msg_id).as_req(), data, timeout)
                .await?;
            this.metrics.write_len(dbg_name, len);
            // wait for the response (delivered via rmap) or time out
            timeout.mix(r_res.map_err(KitsuneError::other)).await?
        }
    }
    /// Write a notify to this connection.
    pub fn notify(
        &self,
        data: &C,
        timeout: KitsuneTimeout,
    ) -> impl std::future::Future<Output = KitsuneResult<()>> + 'static + Send {
        let dbg_name = data.variant_type();
        let mut buf = PoolBuf::new();
        // encode eagerly so encoding errors surface without any network i/o
        if let Err(e) = data.encode(&mut buf) {
            return async move { Err(KitsuneError::other(e)) }.boxed();
        }
        self.priv_notify(buf, timeout, dbg_name).boxed()
    }
    /// Write a request to this connection.
    pub fn request(
        &self,
        data: &C,
        timeout: KitsuneTimeout,
    ) -> impl std::future::Future<Output = KitsuneResult<C>> + 'static + Send {
        let dbg_name = data.variant_type();
        let mut buf = PoolBuf::new();
        // encode eagerly so encoding errors surface without any network i/o
        if let Err(e) = data.encode(&mut buf) {
            return async move { Err(KitsuneError::other(e)) }.boxed();
        }
        self.priv_request(buf, timeout, dbg_name).boxed()
    }
}
/// An endpoint handle - use this to manage a bound endpoint.
#[derive(Clone)]
pub struct Tx2EpHnd<C: Codec + 'static + Send + Unpin>(
    // underlying pool endpoint handle
    EpHnd,
    // shared in-flight request map, handed to every Tx2ConHnd we create
    ShareRMap<C>,
    // metrics callbacks injected at construction
    Arc<Tx2ApiMetrics>,
    // our local certificate digest, for logging
    Tx2Cert,
);
impl<C: Codec + 'static + Send + Unpin> Tx2EpHnd<C> {
    /// Construct a handle with a fresh (empty) request map.
    fn new(local_cert: Tx2Cert, ep: EpHnd, metrics: Arc<Tx2ApiMetrics>) -> Self {
        let rmap = Arc::new(Share::new(RMap::new()));
        Self(ep, rmap, metrics, local_cert)
    }
}
impl<C: Codec + 'static + Send + Unpin> PartialEq for Tx2EpHnd<C> {
    fn eq(&self, oth: &Self) -> bool {
        self.0.uniq().eq(&oth.0.uniq())
    }
}
impl<C: Codec + 'static + Send + Unpin> Eq for Tx2EpHnd<C> {}
impl<C: Codec + 'static + Send + Unpin> std::hash::Hash for Tx2EpHnd<C> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.uniq().hash(state);
    }
}
impl<C: Codec + 'static + Send + Unpin> Tx2EpHnd<C> {
    /// Capture a debugging internal state dump.
    pub fn debug(&self) -> serde_json::Value {
        self.0.debug()
    }
    /// Get the opaque Uniq identifier for this endpoint.
    pub fn uniq(&self) -> Uniq {
        self.0.uniq()
    }
    /// Get the bound local address of this endpoint.
    pub fn local_addr(&self) -> KitsuneResult<TxUrl> {
        self.0.local_addr()
    }
    /// Get the local certificate digest.
    pub fn local_cert(&self) -> Tx2Cert {
        self.0.local_cert()
    }
    /// Is this endpoint closed?
    pub fn is_closed(&self) -> bool {
        self.0.is_closed()
    }
    /// Close this endpoint.
    pub fn close(
        &self,
        code: u32,
        reason: &str,
    ) -> impl std::future::Future<Output = ()> + 'static + Send {
        self.0.close(code, reason)
    }
    /// Force close a specific connection.
    pub fn close_connection(
        &self,
        remote: TxUrl,
        code: u32,
        reason: &str,
    ) -> impl std::future::Future<Output = ()> + 'static + Send {
        self.0.close_connection(remote, code, reason)
    }
    /// Get an existing connection.
    /// If one is not available, establish a new connection.
    pub fn get_connection<U: Into<TxUrl>>(
        &self,
        remote: U,
        timeout: KitsuneTimeout,
    ) -> impl std::future::Future<Output = KitsuneResult<Tx2ConHnd<C>>> + 'static + Send {
        let remote = remote.into();
        // clone everything the future needs before moving into async block
        let rmap = self.1.clone();
        let metrics = self.2.clone();
        let local_cert = self.3.clone();
        let fut = self.0.get_connection(remote.clone(), timeout);
        async move {
            let con = fut.await?;
            // wrap the pool connection in the api-level handle, sharing rmap
            Ok(Tx2ConHnd::new(local_cert, con, remote, rmap, metrics))
        }
    }
    /// Write a notify to this connection.
    pub fn notify<U: Into<TxUrl>>(
        &self,
        remote: U,
        data: &C,
        timeout: KitsuneTimeout,
    ) -> impl std::future::Future<Output = KitsuneResult<()>> + 'static + Send {
        let dbg_name = data.variant_type();
        let mut buf = PoolBuf::new();
        // encode eagerly so encoding errors surface before connecting
        if let Err(e) = data.encode(&mut buf) {
            return async move { Err(KitsuneError::other(e)) }.boxed();
        }
        let con_fut = self.get_connection(remote.into(), timeout);
        futures::future::FutureExt::boxed(async move {
            con_fut.await?.priv_notify(buf, timeout, dbg_name).await
        })
    }
    /// Write a request to this connection.
    pub fn request<U: Into<TxUrl>>(
        &self,
        remote: U,
        data: &C,
        timeout: KitsuneTimeout,
    ) -> impl std::future::Future<Output = KitsuneResult<C>> + 'static + Send {
        let dbg_name = data.variant_type();
        let mut buf = PoolBuf::new();
        // encode eagerly so encoding errors surface before connecting
        if let Err(e) = data.encode(&mut buf) {
            return async move { Err(KitsuneError::other(e)) }.boxed();
        }
        let con_fut = self.get_connection(remote.into(), timeout);
        futures::future::FutureExt::boxed(async move {
            con_fut.await?.priv_request(buf, timeout, dbg_name).await
        })
    }
}
/// Respond to a Tx2EpIncomingRequest
pub struct Tx2Respond<C: Codec + 'static + Send + Unpin> {
    // local/peer certificate digests, for logging
    local_cert: Tx2Cert,
    peer_cert: Tx2Cert,
    // when the request was received; used to log response latency
    time: tokio::time::Instant,
    // codec variant name of the incoming request, for logging
    dbg_name: &'static str,
    // size of the encoded incoming request, for logging
    req_byte_count: usize,
    // connection the response must be written back to
    con: ConHnd,
    // the request's msg id; the response is written with the same id
    msg_id: u64,
    _p: std::marker::PhantomData<C>,
}
impl<C: Codec + 'static + Send + Unpin> std::fmt::Debug for Tx2Respond<C> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("Tx2Respond").finish()
    }
}
impl<C: Codec + 'static + Send + Unpin> Tx2Respond<C> {
    /// Construct a responder for an incoming request, capturing receipt time.
    fn new(
        local_cert: Tx2Cert,
        peer_cert: Tx2Cert,
        dbg_name: &'static str,
        req_byte_count: usize,
        con: ConHnd,
        msg_id: u64,
    ) -> Self {
        let time = tokio::time::Instant::now();
        Self {
            local_cert,
            peer_cert,
            time,
            dbg_name,
            req_byte_count,
            con,
            msg_id,
            _p: std::marker::PhantomData,
        }
    }
    /// Respond to a Tx2EpIncomingRequest
    pub fn respond(
        self,
        data: C,
        timeout: KitsuneTimeout,
    ) -> impl std::future::Future<Output = KitsuneResult<()>> + 'static + Send {
        let Tx2Respond {
            local_cert,
            peer_cert,
            time,
            dbg_name,
            req_byte_count,
            con,
            msg_id,
            ..
        } = self;
        async move {
            let mut buf = PoolBuf::new();
            data.encode(&mut buf).map_err(KitsuneError::other)?;
            let elapsed_s = time.elapsed().as_secs_f64();
            let resp_dbg_name = data.variant_type();
            let resp_byte_count = buf.len();
            tracing::debug!(
                %dbg_name,
                %req_byte_count,
                %resp_dbg_name,
                %resp_byte_count,
                ?local_cert,
                ?peer_cert,
                %elapsed_s,
                "(api) res",
            );
            // echo back the request's msg id so the peer can match it
            con.write(MsgId::new(msg_id).as_res(), buf, timeout).await
        }
    }
}
/// Data associated with an IncomingConnection EpEvent
#[derive(Debug)]
pub struct Tx2EpConnection<C: Codec + 'static + Send + Unpin> {
    /// the remote connection handle (could be closed)
    pub con: Tx2ConHnd<C>,
    /// the remote url from which this data originated
    /// this is included incase the con is closed
    pub url: TxUrl,
}
/// Data associated with an IncomingRequest EpEvent
#[derive(Debug)]
pub struct Tx2EpIncomingRequest<C: Codec + 'static + Send + Unpin> {
    /// the remote connection handle (could be closed)
    pub con: Tx2ConHnd<C>,
    /// the remote url from which this data originated
    /// this is included incase the con is closed
    pub url: TxUrl,
    /// the actual incoming message data
    pub data: C,
    /// callback for responding
    pub respond: Tx2Respond<C>,
}
/// Data associated with an IncomingNotify EpEvent
#[derive(Debug)]
pub struct Tx2EpIncomingNotify<C: Codec + 'static + Send + Unpin> {
    /// the remote connection handle (could be closed)
    pub con: Tx2ConHnd<C>,
    /// the remote url from which this data originated
    /// this is included incase the con is closed
    pub url: TxUrl,
    /// the actual incoming message data
    pub data: C,
}
/// Data associated with a ConnectionClosed EpEvent
#[derive(Debug)]
pub struct Tx2EpConnectionClosed<C: Codec + 'static + Send + Unpin> {
    /// the remote connection handle (could be closed)
    pub con: Tx2ConHnd<C>,
    /// the remote url this used to be connected to
    pub url: TxUrl,
    /// the code # indicating why the connection was closed
    pub code: u32,
    /// the human string reason this connection was closed
    pub reason: String,
}
/// Event emitted by a transport endpoint.
/// Receive these by polling the `Tx2Ep` instance as a Stream.
#[derive(Debug)]
pub enum Tx2EpEvent<C: Codec + 'static + Send + Unpin> {
    /// We've established an incoming connection.
    OutgoingConnection(Tx2EpConnection<C>),
    /// We've accepted an incoming connection.
    IncomingConnection(Tx2EpConnection<C>),
    /// We've received an incoming request on an open connection.
    IncomingRequest(Tx2EpIncomingRequest<C>),
    /// We've received an incoming notification on an open connection.
    IncomingNotify(Tx2EpIncomingNotify<C>),
    /// A connection has closed (Url, Code, Reason).
    ConnectionClosed(Tx2EpConnectionClosed<C>),
    /// A non-fatal internal error.
    Error(KitsuneError),
    /// We got an internal event...
    /// ignore this and poll again.
    Tick,
    /// The endpoint has closed.
    EndpointClosed,
}
/// Represents a bound endpoint. To manage this endpoint, see handle()/Tx2EpHnd.
/// To receive events from this endpoint, poll_next this instance as a Stream.
pub struct Tx2Ep<C: Codec + 'static + Send + Unpin>(Tx2EpHnd<C>, Ep, Arc<Tx2ApiMetrics>, Tx2Cert);
impl<C: Codec + 'static + Send + Unpin> Stream for Tx2Ep<C> {
    type Item = Tx2EpEvent<C>;
    // Translate low-level pool EpEvents into api-level Tx2EpEvents,
    // decoding message payloads and routing responses to pending requests.
    fn poll_next(
        mut self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Option<Self::Item>> {
        // self.0 .1 is the shared in-flight request map inside our Tx2EpHnd
        let rmap = self.0 .1.clone();
        let local_cert = self.3.clone();
        let inner = &mut self.1;
        futures::pin_mut!(inner);
        match Stream::poll_next(inner, cx) {
            std::task::Poll::Ready(Some(evt)) => {
                let evt = match evt {
                    EpEvent::OutgoingConnection(EpConnection { con, url }) => {
                        Tx2EpEvent::OutgoingConnection(Tx2EpConnection {
                            con: Tx2ConHnd::new(local_cert, con, url.clone(), rmap, self.2.clone()),
                            url,
                        })
                    }
                    EpEvent::IncomingConnection(EpConnection { con, url }) => {
                        Tx2EpEvent::IncomingConnection(Tx2EpConnection {
                            con: Tx2ConHnd::new(local_cert, con, url.clone(), rmap, self.2.clone()),
                            url,
                        })
                    }
                    EpEvent::IncomingData(EpIncomingData {
                        con,
                        url,
                        msg_id,
                        data,
                    }) => {
                        let peer_cert = con.peer_cert();
                        let len = data.len();
                        // decode the raw bytes with the configured codec
                        let (_, c) = match C::decode_ref(&data) {
                            Err(e) => {
                                // TODO - close connection?
                                return std::task::Poll::Ready(Some(Tx2EpEvent::Error(
                                    KitsuneError::other(e),
                                )));
                            }
                            Ok(c) => c,
                        };
                        let dbg_name = c.variant_type();
                        // dispatch on the message id type: notify / request / response
                        match msg_id.get_type() {
                            MsgIdType::Notify => Tx2EpEvent::IncomingNotify(Tx2EpIncomingNotify {
                                con: Tx2ConHnd::new(
                                    local_cert,
                                    con.clone(),
                                    url.clone(),
                                    rmap,
                                    self.2.clone(),
                                ),
                                url,
                                data: c,
                            }),
                            MsgIdType::Req => Tx2EpEvent::IncomingRequest(Tx2EpIncomingRequest {
                                con: Tx2ConHnd::new(
                                    local_cert.clone(),
                                    con.clone(),
                                    url.clone(),
                                    rmap,
                                    self.2.clone(),
                                ),
                                url,
                                data: c,
                                respond: Tx2Respond::new(
                                    local_cert,
                                    peer_cert,
                                    dbg_name,
                                    len,
                                    con,
                                    msg_id.as_id(),
                                ),
                            }),
                            MsgIdType::Res => {
                                // responses are routed internally to the pending
                                // request in rmap; callers only see a Tick
                                let _ = rmap.share_mut(move |i, _| {
                                    i.respond(con.uniq(), len, msg_id.as_id(), c);
                                    Ok(())
                                });
                                Tx2EpEvent::Tick
                            }
                        }
                    }
                    EpEvent::IncomingError(EpIncomingError {
                        con, msg_id, err, ..
                    }) => match msg_id.get_type() {
                        MsgIdType::Res => {
                            // deliver the error to the pending request, if any
                            let _ = rmap.share_mut(move |i, _| {
                                i.respond_err(con.uniq(), msg_id.as_id(), err);
                                Ok(())
                            });
                            Tx2EpEvent::Tick
                        }
                        _ => {
                            // TODO - should this be a connection-specific
                            // error type, so we can give the con handle?
                            Tx2EpEvent::Error(err)
                        }
                    },
                    EpEvent::ConnectionClosed(EpConnectionClosed {
                        con,
                        url,
                        code,
                        reason,
                    }) => Tx2EpEvent::ConnectionClosed(Tx2EpConnectionClosed {
                        con: Tx2ConHnd::new(local_cert, con, url.clone(), rmap, self.2.clone()),
                        url,
                        code,
                        reason,
                    }),
                    EpEvent::Error(e) => Tx2EpEvent::Error(e),
                    EpEvent::EndpointClosed => Tx2EpEvent::EndpointClosed,
                };
                std::task::Poll::Ready(Some(evt))
            }
            std::task::Poll::Ready(None) => std::task::Poll::Ready(None),
            std::task::Poll::Pending => std::task::Poll::Pending,
        }
    }
}
impl<C: Codec + 'static + Send + Unpin> Tx2Ep<C> {
    /// A cheaply clone-able handle to this endpoint.
    pub fn handle(&self) -> &Tx2EpHnd<C> {
        &self.0
    }
}
// Callback signature: (codec variant name, bytes written).
type WriteLenCb = Box<dyn Fn(&'static str, usize) + 'static + Send + Sync>;
/// Metrics callback manager to be injected into the endpoint
pub struct Tx2ApiMetrics {
    // optional callback invoked after each successful transport write
    write_len: Option<WriteLenCb>,
}
impl Default for Tx2ApiMetrics {
    fn default() -> Self {
        Tx2ApiMetrics { write_len: None }
    }
}
impl Tx2ApiMetrics {
    /// Construct a new default Tx2ApiMetrics with no set callbacks
    pub fn new() -> Self {
        Default::default()
    }
    /// This callback will be invoked when we successfully write data
    /// to a transport connection.
    pub fn set_write_len<F>(mut self, f: F) -> Self
    where
        F: Fn(&'static str, usize) + 'static + Send + Sync,
    {
        self.write_len = Some(Box::new(f));
        self
    }
    /// Invoke the registered write-length callback, if one was set.
    fn write_len(&self, d: &'static str, l: usize) {
        if let Some(cb) = self.write_len.as_ref() {
            cb(d, l);
        }
    }
}
/// Construct a new Tx2EpFactory instance from a pool EpFactory
pub fn tx2_api<C: Codec + 'static + Send + Unpin>(
    factory: EpFactory,
    metrics: Tx2ApiMetrics,
) -> Tx2EpFactory<C> {
    Tx2EpFactory::new(factory, metrics)
}
/// Endpoint binding factory - lets us easily pass around logic
/// for later binding network transports.
pub struct Tx2EpFactory<C: Codec + 'static + Send + Unpin>(
    // the wrapped pool-level factory
    EpFactory,
    // metrics shared by all endpoints bound through this factory
    Arc<Tx2ApiMetrics>,
    std::marker::PhantomData<C>,
);
impl<C: Codec + 'static + Send + Unpin> Tx2EpFactory<C> {
    /// Construct a new Tx2EpFactory instance from a frontend EpFactory
    pub fn new(factory: EpFactory, metrics: Tx2ApiMetrics) -> Self {
        Self(factory, Arc::new(metrics), std::marker::PhantomData)
    }
    /// Bind a new local transport endpoint.
    pub fn bind<U: Into<TxUrl>>(
        &self,
        bind_spec: U,
        timeout: KitsuneTimeout,
    ) -> impl std::future::Future<Output = KitsuneResult<Tx2Ep<C>>> + 'static + Send {
        let metrics = self.1.clone();
        let fut = self.0.bind(bind_spec.into(), timeout);
        async move {
            let ep = fut.await?;
            let ep_hnd = ep.handle().clone();
            let local_cert = ep_hnd.local_cert();
            // wrap the pool endpoint + handle in the api-level types
            Ok(Tx2Ep(
                Tx2EpHnd::new(local_cert.clone(), ep_hnd, metrics.clone()),
                ep,
                metrics,
                local_cert,
            ))
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tx2::tx2_pool_promote::*;
    use futures::stream::StreamExt;
    // End-to-end smoke test: two in-memory endpoints, one request/response
    // round trip (the responder echoes the value incremented by one).
    #[tokio::test(flavor = "multi_thread")]
    async fn test_tx2_api() {
        observability::test_run().ok();
        tracing::trace!("bob");
        let t = KitsuneTimeout::from_millis(5000);
        crate::write_codec_enum! {
            codec Test {
                One(0x01) {
                    data.0: usize,
                },
            }
        }
        // Spawn a task that answers every incoming request with data + 1.
        fn handle(mut ep: Tx2Ep<Test>) -> tokio::task::JoinHandle<KitsuneResult<()>> {
            metric_task(async move {
                while let Some(evt) = ep.next().await {
                    if let Tx2EpEvent::IncomingRequest(Tx2EpIncomingRequest {
                        data, respond, ..
                    }) = evt
                    {
                        let val = match data {
                            Test::One(One { data }) => data,
                        };
                        let t = KitsuneTimeout::from_millis(5000);
                        respond.respond(Test::one(val + 1), t).await.unwrap();
                    }
                }
                Ok(())
            })
        }
        // Build an in-memory transport, promote it to a pool, wrap in the api.
        let mk_ep = || async {
            let f = tx2_mem_adapter(MemConfig::default()).await.unwrap();
            let f = tx2_pool_promote(f, Default::default());
            let f = tx2_api(f, Default::default());
            f.bind("none:", t).await.unwrap()
        };
        let ep1 = mk_ep().await;
        let ep1_hnd = ep1.handle().clone();
        let ep1_task = handle(ep1);
        let ep2 = mk_ep().await;
        let ep2_hnd = ep2.handle().clone();
        let ep2_task = handle(ep2);
        let addr2 = ep2_hnd.local_addr().unwrap();
        println!("addr2: {}", addr2);
        // ep1 sends 42 to ep2; expect 43 back.
        let con = ep1_hnd.get_connection(addr2, t).await.unwrap();
        let res = con.request(&Test::one(42), t).await.unwrap();
        assert_eq!(&Test::one(43), &res);
        ep1_hnd.close(0, "").await;
        ep2_hnd.close(0, "").await;
        ep1_task.await.unwrap().unwrap();
        ep2_task.await.unwrap().unwrap();
    }
}
| 30.96358 | 100 | 0.499798 |
d688cef6594a00d680a527116fd9e248220f6562 | 789 | /*
* Ory APIs
*
* Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers.
*
* The version of the OpenAPI document: v0.0.1-alpha.30
* Contact: [email protected]
* Generated by: https://openapi-generator.tech
*/
/// ActiveProject : The Active Project ID
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ActiveProject {
    /// The Active Project ID format: uuid
    // Omitted from serialized output when unset.
    #[serde(rename = "project_id", skip_serializing_if = "Option::is_none")]
    pub project_id: Option<String>,
}
impl ActiveProject {
    /// The Active Project ID
    // Construct with no project id set; assign `project_id` afterwards.
    pub fn new() -> ActiveProject {
        ActiveProject {
            project_id: None,
        }
    }
}
095cd43ea13fb2f9bda22d36d3a7d9dc690fd8b9 | 14,717 | use super::REDUNDANT_PATTERN_MATCHING;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet;
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::needs_ordered_drop;
use clippy_utils::{higher, match_def_path};
use clippy_utils::{is_lang_ctor, is_trait_method, paths};
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::LangItem::{OptionNone, PollPending};
use rustc_hir::{
intravisit::{walk_expr, Visitor},
Arm, Block, Expr, ExprKind, Node, Pat, PatKind, QPath, UnOp,
};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, subst::GenericArgKind, DefIdTree, Ty};
use rustc_span::sym;
/// Lint entry point for `while let` expressions: desugar via `higher::WhileLet`
/// and delegate to the shared suggestion logic.
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
    if let Some(higher::WhileLet { let_pat, let_expr, .. }) = higher::WhileLet::hir(expr) {
        find_sugg_for_if_let(cx, expr, let_pat, let_expr, "while", false);
    }
}
/// Lint entry point for `if let` expressions; `has_else` affects which type's
/// drop behavior must be considered by the suggestion logic.
pub(super) fn check_if_let<'tcx>(
    cx: &LateContext<'tcx>,
    expr: &'tcx Expr<'_>,
    pat: &'tcx Pat<'_>,
    scrutinee: &'tcx Expr<'_>,
    has_else: bool,
) {
    find_sugg_for_if_let(cx, expr, pat, scrutinee, "if", has_else);
}
// Extract the generic arguments out of a type
fn try_get_generic_ty(ty: Ty<'_>, index: usize) -> Option<Ty<'_>> {
if_chain! {
if let ty::Adt(_, subs) = ty.kind();
if let Some(sub) = subs.get(index);
if let GenericArgKind::Type(sub_ty) = sub.unpack();
then {
Some(sub_ty)
} else {
None
}
}
}
// Checks if there are any temporaries created in the given expression for which drop order
// matters.
fn temporaries_need_ordered_drop<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
    // HIR visitor that sets `res` once any drop-order-relevant temporary is found.
    struct V<'a, 'tcx> {
        cx: &'a LateContext<'tcx>,
        res: bool,
    }
    impl<'a, 'tcx> Visitor<'tcx> for V<'a, 'tcx> {
        fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
            match expr.kind {
                // Taking the reference of a value leaves a temporary
                // e.g. In `&String::new()` the string is a temporary value.
                // Remaining fields are temporary values
                // e.g. In `(String::new(), 0).1` the string is a temporary value.
                ExprKind::AddrOf(_, _, expr) | ExprKind::Field(expr, _) => {
                    if !matches!(expr.kind, ExprKind::Path(_)) {
                        if needs_ordered_drop(self.cx, self.cx.typeck_results().expr_ty(expr)) {
                            self.res = true;
                        } else {
                            self.visit_expr(expr);
                        }
                    }
                },
                // the base type is alway taken by reference.
                // e.g. In `(vec![0])[0]` the vector is a temporary value.
                ExprKind::Index(base, index) => {
                    if !matches!(base.kind, ExprKind::Path(_)) {
                        if needs_ordered_drop(self.cx, self.cx.typeck_results().expr_ty(base)) {
                            self.res = true;
                        } else {
                            self.visit_expr(base);
                        }
                    }
                    self.visit_expr(index);
                },
                // Method calls can take self by reference.
                // e.g. In `String::new().len()` the string is a temporary value.
                ExprKind::MethodCall(_, [self_arg, args @ ..], _) => {
                    if !matches!(self_arg.kind, ExprKind::Path(_)) {
                        // only relevant if the receiver is taken by reference
                        let self_by_ref = self
                            .cx
                            .typeck_results()
                            .type_dependent_def_id(expr.hir_id)
                            .map_or(false, |id| self.cx.tcx.fn_sig(id).skip_binder().inputs()[0].is_ref());
                        if self_by_ref && needs_ordered_drop(self.cx, self.cx.typeck_results().expr_ty(self_arg)) {
                            self.res = true;
                        } else {
                            self.visit_expr(self_arg);
                        }
                    }
                    args.iter().for_each(|arg| self.visit_expr(arg));
                },
                // Either explicitly drops values, or changes control flow.
                ExprKind::DropTemps(_)
                | ExprKind::Ret(_)
                | ExprKind::Break(..)
                | ExprKind::Yield(..)
                | ExprKind::Block(Block { expr: None, .. }, _)
                | ExprKind::Loop(..) => (),
                // Only consider the final expression.
                ExprKind::Block(Block { expr: Some(expr), .. }, _) => self.visit_expr(expr),
                _ => walk_expr(self, expr),
            }
        }
    }
    let mut v = V { cx, res: false };
    v.visit_expr(expr);
    v.res
}
/// Shared implementation for `if let` / `while let`: detect redundant pattern
/// matches (e.g. `if let Ok(_) = x`) and suggest the corresponding predicate
/// method (e.g. `x.is_ok()`), warning when the rewrite changes drop order.
fn find_sugg_for_if_let<'tcx>(
    cx: &LateContext<'tcx>,
    expr: &'tcx Expr<'_>,
    let_pat: &Pat<'_>,
    let_expr: &'tcx Expr<'_>,
    keyword: &'static str,
    has_else: bool,
) {
    // also look inside refs
    // if we have &None for example, peel it so we can detect "if let None = x"
    let check_pat = match let_pat.kind {
        PatKind::Ref(inner, _mutability) => inner,
        _ => let_pat,
    };
    let op_ty = cx.typeck_results().expr_ty(let_expr);
    // Determine which function should be used, and the type contained by the corresponding
    // variant.
    let (good_method, inner_ty) = match check_pat.kind {
        // e.g. `Ok(_)`, `Some(_)`, `Poll::Ready(_)`, `IpAddr::V4(_)`
        PatKind::TupleStruct(ref qpath, [sub_pat], _) => {
            if let PatKind::Wild = sub_pat.kind {
                let res = cx.typeck_results().qpath_res(qpath, check_pat.hir_id);
                let Some(id) = res.opt_def_id().map(|ctor_id| cx.tcx.parent(ctor_id)) else { return };
                let lang_items = cx.tcx.lang_items();
                if Some(id) == lang_items.result_ok_variant() {
                    ("is_ok()", try_get_generic_ty(op_ty, 0).unwrap_or(op_ty))
                } else if Some(id) == lang_items.result_err_variant() {
                    ("is_err()", try_get_generic_ty(op_ty, 1).unwrap_or(op_ty))
                } else if Some(id) == lang_items.option_some_variant() {
                    ("is_some()", op_ty)
                } else if Some(id) == lang_items.poll_ready_variant() {
                    ("is_ready()", op_ty)
                } else if match_def_path(cx, id, &paths::IPADDR_V4) {
                    ("is_ipv4()", op_ty)
                } else if match_def_path(cx, id, &paths::IPADDR_V6) {
                    ("is_ipv6()", op_ty)
                } else {
                    return;
                }
            } else {
                return;
            }
        },
        // e.g. unit-like `None` / `Poll::Pending`
        PatKind::Path(ref path) => {
            let method = if is_lang_ctor(cx, path, OptionNone) {
                "is_none()"
            } else if is_lang_ctor(cx, path, PollPending) {
                "is_pending()"
            } else {
                return;
            };
            // `None` and `Pending` don't have an inner type.
            (method, cx.tcx.types.unit)
        },
        _ => return,
    };
    // If this is the last expression in a block or there is an else clause then the whole
    // type needs to be considered, not just the inner type of the branch being matched on.
    // Note the last expression in a block is dropped after all local bindings.
    let check_ty = if has_else
        || (keyword == "if" && matches!(cx.tcx.hir().parent_iter(expr.hir_id).next(), Some((_, Node::Block(..)))))
    {
        op_ty
    } else {
        inner_ty
    };
    // All temporaries created in the scrutinee expression are dropped at the same time as the
    // scrutinee would be, so they have to be considered as well.
    // e.g. in `if let Some(x) = foo.lock().unwrap().baz.as_ref() { .. }` the lock will be held
    // for the duration if body.
    let needs_drop = needs_ordered_drop(cx, check_ty) || temporaries_need_ordered_drop(cx, let_expr);
    // check that `while_let_on_iterator` lint does not trigger
    if_chain! {
        if keyword == "while";
        if let ExprKind::MethodCall(method_path, _, _) = let_expr.kind;
        if method_path.ident.name == sym::next;
        if is_trait_method(cx, let_expr, sym::Iterator);
        then {
            return;
        }
    }
    // Peel one level of `&`/`*` so the suggestion targets the underlying value.
    let result_expr = match &let_expr.kind {
        ExprKind::AddrOf(_, _, borrowed) => borrowed,
        ExprKind::Unary(UnOp::Deref, deref) => deref,
        _ => let_expr,
    };
    span_lint_and_then(
        cx,
        REDUNDANT_PATTERN_MATCHING,
        let_pat.span,
        &format!("redundant pattern matching, consider using `{}`", good_method),
        |diag| {
            // if/while let ... = ... { ... }
            // ^^^^^^^^^^^^^^^^^^^^^^^^^^^
            let expr_span = expr.span;
            // if/while let ... = ... { ... }
            //                 ^^^
            let op_span = result_expr.span.source_callsite();
            // if/while let ... = ... { ... }
            // ^^^^^^^^^^^^^^^^^^^
            let span = expr_span.until(op_span.shrink_to_hi());
            // drop-order changes make the rewrite only maybe-correct
            let app = if needs_drop {
                Applicability::MaybeIncorrect
            } else {
                Applicability::MachineApplicable
            };
            let sugg = Sugg::hir_with_macro_callsite(cx, result_expr, "_")
                .maybe_par()
                .to_string();
            diag.span_suggestion(span, "try this", format!("{} {}.{}", keyword, sugg, good_method), app);
            if needs_drop {
                diag.note("this will change drop order of the result, as well as all temporaries");
                diag.note("add `#[allow(clippy::redundant_pattern_matching)]` if this is important");
            }
        },
    );
}
/// Checks a full `match` expression for redundant pattern matching.
///
/// Only fires on a two-arm match where the arms pair up as
/// `Ok(_)`/`Err(_)`, `V4(_)`/`V6(_)`, `Some(_)`/`None` or
/// `Ready(_)`/`Pending`, and suggests replacing the whole match with the
/// corresponding `is_*()` method call on the scrutinee.
pub(super) fn check_match<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, op: &Expr<'_>, arms: &[Arm<'_>]) {
    if arms.len() == 2 {
        let node_pair = (&arms[0].pat.kind, &arms[1].pat.kind);
        let found_good_method = match node_pair {
            // Both arms are tuple-struct patterns with a single wildcard field,
            // e.g. `Ok(_) => .., Err(_) => ..` or `V4(_) => .., V6(_) => ..`.
            (
                PatKind::TupleStruct(ref path_left, patterns_left, _),
                PatKind::TupleStruct(ref path_right, patterns_right, _),
            ) if patterns_left.len() == 1 && patterns_right.len() == 1 => {
                if let (PatKind::Wild, PatKind::Wild) = (&patterns_left[0].kind, &patterns_right[0].kind) {
                    find_good_method_for_match(
                        cx,
                        arms,
                        path_left,
                        path_right,
                        &paths::RESULT_OK,
                        &paths::RESULT_ERR,
                        "is_ok()",
                        "is_err()",
                    )
                    .or_else(|| {
                        find_good_method_for_match(
                            cx,
                            arms,
                            path_left,
                            path_right,
                            &paths::IPADDR_V4,
                            &paths::IPADDR_V6,
                            "is_ipv4()",
                            "is_ipv6()",
                        )
                    })
                } else {
                    None
                }
            },
            // One tuple-struct arm plus one unit-variant arm (in either order),
            // e.g. `Some(_) => .., None => ..` or `Ready(_) => .., Pending => ..`.
            (PatKind::TupleStruct(ref path_left, patterns, _), PatKind::Path(ref path_right))
            | (PatKind::Path(ref path_left), PatKind::TupleStruct(ref path_right, patterns, _))
                if patterns.len() == 1 =>
            {
                if let PatKind::Wild = patterns[0].kind {
                    find_good_method_for_match(
                        cx,
                        arms,
                        path_left,
                        path_right,
                        &paths::OPTION_SOME,
                        &paths::OPTION_NONE,
                        "is_some()",
                        "is_none()",
                    )
                    .or_else(|| {
                        find_good_method_for_match(
                            cx,
                            arms,
                            path_left,
                            path_right,
                            &paths::POLL_READY,
                            &paths::POLL_PENDING,
                            "is_ready()",
                            "is_pending()",
                        )
                    })
                } else {
                    None
                }
            },
            _ => None,
        };
        if let Some(good_method) = found_good_method {
            let span = expr.span.to(op.span);
            // Strip a leading `&` from the scrutinee so the suggestion calls
            // the method on the value itself rather than on a reference.
            let result_expr = match &op.kind {
                ExprKind::AddrOf(_, _, borrowed) => borrowed,
                _ => op,
            };
            span_lint_and_then(
                cx,
                REDUNDANT_PATTERN_MATCHING,
                expr.span,
                &format!("redundant pattern matching, consider using `{}`", good_method),
                |diag| {
                    diag.span_suggestion(
                        span,
                        "try this",
                        format!("{}.{}", snippet(cx, result_expr.span, "_"), good_method),
                        Applicability::MaybeIncorrect, // snippet
                    );
                },
            );
        }
    }
}
/// Given a two-arm match, decides which `is_*()` method (if any) the whole
/// match can be replaced with.
///
/// The arm whose pattern resolves to `expected_left` pairs with
/// `should_be_left`, and `expected_right` with `should_be_right`; the arms
/// may appear in either order. Only matches whose bodies are the literals
/// `true`/`false` (one of each) qualify. Returns `None` otherwise.
#[expect(clippy::too_many_arguments)]
fn find_good_method_for_match<'a>(
    cx: &LateContext<'_>,
    arms: &[Arm<'_>],
    path_left: &QPath<'_>,
    path_right: &QPath<'_>,
    expected_left: &[&str],
    expected_right: &[&str],
    should_be_left: &'a str,
    should_be_right: &'a str,
) -> Option<&'a str> {
    let left_id = cx
        .typeck_results()
        .qpath_res(path_left, arms[0].pat.hir_id)
        .opt_def_id()?;
    let right_id = cx
        .typeck_results()
        .qpath_res(path_right, arms[1].pat.hir_id)
        .opt_def_id()?;
    let body_node_pair = if match_def_path(cx, left_id, expected_left) && match_def_path(cx, right_id, expected_right) {
        (&(*arms[0].body).kind, &(*arms[1].body).kind)
    // BUGFIX: the reversed-order case must compare `left_id` against
    // `expected_right` (it previously tested `right_id` twice, so matches
    // with the arms in the opposite order could never lint).
    } else if match_def_path(cx, right_id, expected_left) && match_def_path(cx, left_id, expected_right) {
        // Arms appear in the opposite order, so swap the bodies to match.
        (&(*arms[1].body).kind, &(*arms[0].body).kind)
    } else {
        return None;
    };
    match body_node_pair {
        (ExprKind::Lit(ref lit_left), ExprKind::Lit(ref lit_right)) => match (&lit_left.node, &lit_right.node) {
            (LitKind::Bool(true), LitKind::Bool(false)) => Some(should_be_left),
            (LitKind::Bool(false), LitKind::Bool(true)) => Some(should_be_right),
            _ => None,
        },
        _ => None,
    }
}
| 38.627297 | 120 | 0.48624 |
64b8a8aeec151c97385ebe8d027f1bfa19682763 | 1,171 | // hashmap1.rs
// A basket of fruits in the form of a hash map needs to be defined.
// The key represents the name of the fruit and the value represents
// how many of that particular fruit is in the basket. You have to put
// at least three different types of fruits (e.g apple, banana, mango)
// in the basket and the total count of all the fruits should be at
// least five.
//
// Make me compile and pass the tests!
//
// Execute the command `rustlings hint hashmap1` if you need
// hints.
// I AM DONE
use std::collections::HashMap;
/// Builds the fruit basket: fruit name -> how many of that fruit it holds.
///
/// Contains three fruit types (banana, apple, mango) whose counts sum to
/// five, satisfying the exercise's tests.
fn fruit_basket() -> HashMap<String, u32> {
    // Two bananas are already given for you :)
    let fruits = [("banana", 2), ("apple", 3), ("mango", 0)];
    fruits
        .iter()
        .map(|&(name, count)| (name.to_string(), count))
        .collect()
}
// Unit tests enforcing the exercise's requirements on `fruit_basket`.
#[cfg(test)]
mod tests {
    use super::*;
    // At least three distinct fruit names (map keys) must be present.
    #[test]
    fn at_least_three_types_of_fruits() {
        let basket = fruit_basket();
        assert!(basket.len() >= 3);
    }
    // The per-fruit counts (map values) must add up to at least five.
    #[test]
    fn at_least_five_fruits() {
        let basket = fruit_basket();
        assert!(basket
            .values()
            .sum::<u32>() >= 5);
    }
}
| 24.914894 | 70 | 0.628523 |
260f694aad5712d35c77de39ac89b4a2f1dd19a6 | 29,909 | use std::fmt;
use std::ops::{Deref, DerefMut};
use std::convert::TryInto;
use yansi::Paint;
use either::Either;
use figment::{Figment, Provider};
use crate::{Catcher, Config, Route, Shutdown, sentinel, shield::Shield};
use crate::router::Router;
use crate::trip_wire::TripWire;
use crate::fairing::{Fairing, Fairings};
use crate::phase::{Phase, Build, Building, Ignite, Igniting, Orbit, Orbiting};
use crate::phase::{Stateful, StateRef, State};
use crate::http::uri::{self, Origin};
use crate::http::ext::IntoOwned;
use crate::error::{Error, ErrorKind};
use crate::log::PaintExt;
/// The application server itself.
///
/// # Phases
///
/// An instance of `Rocket` represents a web server and its state. It progresses
/// through three statically-enforced phases into orbit: build, ignite, orbit.
///
/// ## Build
///
/// All application and server configuration occurs during the [`Build`] phase.
/// This includes setting configuration options, mounting/registering
/// routes/catchers, managing state, and attaching fairings. This is the _only_
/// phase in which an instance can be modified. To finalize changes, an instance
/// is ignited via [`Rocket::ignite()`], progressing it into the _ignite_ phase,
/// or directly launched into orbit with [`Rocket::launch()`] which progress the
/// instance through ignite into orbit.
///
/// ## Ignite
///
/// An instance in the [`Ignite`] phase is in its final configuration, available
/// via [`Rocket::config()`]. Barring user-supplied interior mutation,
/// application state is guaranteed to remain unchanged beyond this point. An
/// instance in the ignite phase can be launched into orbit to serve requests
/// via [`Rocket::launch()`].
///
/// ## Orbit
///
/// An instance in the [`Orbit`] phase represents a _running_ application,
/// actively serving requests.
///
/// # Launching
///
/// ## Manual Launching
///
/// To launch an instance of `Rocket`, it _must_ progress through all three
/// phases. To progress into the ignite or launch phases, a tokio `async`
/// runtime is required. The [`#[main]`](crate::main) attribute initializes a
/// Rocket-specific tokio runtime and runs the attributed `async fn` inside of
/// it:
///
/// ```rust,no_run
/// #[rocket::main]
/// async fn main() -> Result<(), rocket::Error> {
/// rocket::build()
/// .ignite().await?
/// .launch().await
/// }
/// ```
///
/// Note that [`Rocket::launch()`] automatically progresses an instance of
/// `Rocket` from any phase into orbit:
///
/// ```rust,no_run
/// #[rocket::main]
/// async fn main() -> Result<(), rocket::Error> {
/// rocket::build().launch().await
/// }
/// ```
///
/// ## Automatic Launching
///
/// Manually progressing an instance of Rocket though its phases is only
/// necessary when either an instance's finalized state is to be inspected (in
/// the _ignite_ phase) or the instance is expected to deorbit due to
/// [`Rocket::shutdown()`]. In the more common case when neither is required,
/// the [`#[launch]`](crate::launch) attribute can be used. When applied to a
/// function that returns a `Rocket<Build>`, it automatically initializes an
/// `async` runtime and launches the function's returned instance:
///
/// ```rust,no_run
/// # use rocket::launch;
/// use rocket::{Rocket, Build};
///
/// #[launch]
/// fn rocket() -> Rocket<Build> {
/// rocket::build()
/// }
/// ```
///
/// To avoid needing to import _any_ items in the common case, the `launch`
/// attribute will infer a return type written as `_` as `Rocket<Build>`:
///
/// ```rust,no_run
/// # use rocket::launch;
/// #[launch]
/// fn rocket() -> _ {
/// rocket::build()
/// }
/// ```
pub struct Rocket<P: Phase>(pub(crate) P::State);
impl Rocket<Build> {
/// Create a new `Rocket` application using the default configuration
/// provider, [`Config::figment()`].
///
/// This method is typically called through the
/// [`rocket::build()`](crate::build) alias.
///
/// # Examples
///
/// ```rust
/// # use rocket::launch;
/// #[launch]
/// fn rocket() -> _ {
/// rocket::build()
/// }
/// ```
#[inline(always)]
pub fn build() -> Self {
Rocket::custom(Config::figment())
}
/// Creates a new `Rocket` application using the supplied configuration
/// provider.
///
/// This method is typically called through the
/// [`rocket::custom()`](crate::custom()) alias.
///
/// # Example
///
/// ```rust
/// # use rocket::launch;
/// use rocket::figment::{Figment, providers::{Toml, Env, Format}};
///
/// #[launch]
/// fn rocket() -> _ {
/// let figment = Figment::from(rocket::Config::default())
/// .merge(Toml::file("MyApp.toml").nested())
/// .merge(Env::prefixed("MY_APP_").global());
///
/// rocket::custom(figment)
/// }
/// ```
pub fn custom<T: Provider>(provider: T) -> Self {
// We initialize the logger here so that logging from fairings and so on
// are visible; we use the final config to set a max log-level in ignite
crate::log::init_default();
let rocket: Rocket<Build> = Rocket(Building {
figment: Figment::from(provider),
..Default::default()
});
rocket.attach(Shield::default())
}
/// Sets the configuration provider in `self` to `provider`.
///
/// A [`Figment`] generated from the current `provider` can _always_ be
/// retrieved via [`Rocket::figment()`]. However, because the provider can
/// be changed at any point prior to ignition, a [`Config`] can only be
/// retrieved in the ignite or orbit phases, or by manually extracting one
/// from a particular figment.
///
/// # Example
///
/// ```rust
/// use rocket::Config;
/// # use std::net::Ipv4Addr;
/// # use std::path::{Path, PathBuf};
/// # type Result = std::result::Result<(), rocket::Error>;
///
/// let config = Config {
/// port: 7777,
/// address: Ipv4Addr::new(18, 127, 0, 1).into(),
/// temp_dir: PathBuf::from("/tmp/config-example"),
/// ..Config::debug_default()
/// };
///
/// # let _: Result = rocket::async_test(async move {
/// let rocket = rocket::custom(&config).ignite().await?;
/// assert_eq!(rocket.config().port, 7777);
/// assert_eq!(rocket.config().address, Ipv4Addr::new(18, 127, 0, 1));
/// assert_eq!(rocket.config().temp_dir, Path::new("/tmp/config-example"));
///
/// // Create a new figment which modifies _some_ keys the existing figment:
/// let figment = rocket.figment().clone()
/// .merge((Config::PORT, 8888))
/// .merge((Config::ADDRESS, "171.64.200.10"));
///
/// let rocket = rocket::custom(&config)
/// .configure(figment)
/// .ignite().await?;
///
/// assert_eq!(rocket.config().port, 8888);
/// assert_eq!(rocket.config().address, Ipv4Addr::new(171, 64, 200, 10));
/// assert_eq!(rocket.config().temp_dir, Path::new("/tmp/config-example"));
/// # Ok(())
/// # });
/// ```
pub fn configure<T: Provider>(mut self, provider: T) -> Self {
self.figment = Figment::from(provider);
self
}
    /// Shared implementation behind [`mount()`](Rocket::mount) and
    /// [`register()`](Rocket::register): validates `base` as an origin URI,
    /// rebases every item in `items` onto it via `m`, and stores each result
    /// with `f`.
    ///
    /// `kind` is only used in log/panic messages ("route" or "catcher").
    /// Panics if `base` is not a valid origin URI or if `m` fails for any
    /// item, matching the documented panics of `mount`/`register`.
    fn load<'a, B, T, F, M>(mut self, kind: &str, base: B, items: Vec<T>, m: M, f: F) -> Self
        where B: TryInto<Origin<'a>> + Clone + fmt::Display,
              B::Error: fmt::Display,
              M: Fn(&Origin<'a>, T) -> Result<T, uri::Error<'static>>,
              F: Fn(&mut Self, T),
              T: Clone + fmt::Display,
    {
        // Parse the base into an owned origin URI; abort loudly on failure.
        let mut base = base.clone().try_into()
            .map(|origin| origin.into_owned())
            .unwrap_or_else(|e| {
                error!("invalid {} base: {}", kind, Paint::white(&base));
                error_!("{}", e);
                panic!("aborting due to {} base error", kind);
            });
        // A query string on the base has no meaning here: warn and drop it.
        if base.query().is_some() {
            warn!("query in {} base '{}' is ignored", kind, Paint::white(&base));
            base.clear_query();
        }
        for unmounted_item in items {
            // Rebase the item onto `base`; any malformed URI is fatal.
            let item = m(&base, unmounted_item.clone())
                .unwrap_or_else(|e| {
                    error!("malformed URI in {} {}", kind, unmounted_item);
                    error_!("{}", e);
                    panic!("aborting due to invalid {} URI", kind);
                });
            f(&mut self, item)
        }
        self
    }
/// Mounts all of the routes in the supplied vector at the given `base`
/// path. Mounting a route with path `path` at path `base` makes the route
/// available at `base/path`.
///
/// # Panics
///
/// Panics if either:
/// * the `base` mount point is not a valid static path: a valid origin
/// URI without dynamic parameters.
///
/// * any route's URI is not a valid origin URI.
///
/// **Note:** _This kind of panic is guaranteed not to occur if the routes
/// were generated using Rocket's code generation._
///
/// # Examples
///
/// Use the `routes!` macro to mount routes created using the code
/// generation facilities. Requests to the `/hello/world` URI will be
/// dispatched to the `hi` route.
///
/// ```rust,no_run
/// # #[macro_use] extern crate rocket;
/// #
/// #[get("/world")]
/// fn hi() -> &'static str {
/// "Hello!"
/// }
///
/// #[launch]
/// fn rocket() -> _ {
/// rocket::build().mount("/hello", routes![hi])
/// }
/// ```
///
/// Manually create a route named `hi` at path `"/world"` mounted at base
/// `"/hello"`. Requests to the `/hello/world` URI will be dispatched to the
/// `hi` route.
///
/// ```rust
/// # #[macro_use] extern crate rocket;
/// use rocket::{Request, Route, Data, route};
/// use rocket::http::Method;
///
/// fn hi<'r>(req: &'r Request, _: Data<'r>) -> route::BoxFuture<'r> {
/// route::Outcome::from(req, "Hello!").pin()
/// }
///
/// #[launch]
/// fn rocket() -> _ {
/// let hi_route = Route::new(Method::Get, "/world", hi);
/// rocket::build().mount("/hello", vec![hi_route])
/// }
/// ```
pub fn mount<'a, B, R>(self, base: B, routes: R) -> Self
where B: TryInto<Origin<'a>> + Clone + fmt::Display,
B::Error: fmt::Display,
R: Into<Vec<Route>>
{
self.load("route", base, routes.into(),
|base, route| route.map_base(|old| format!("{}{}", base, old)),
|r, route| r.0.routes.push(route))
}
/// Registers all of the catchers in the supplied vector, scoped to `base`.
///
/// # Panics
///
/// Panics if `base` is not a valid static path: a valid origin URI without
/// dynamic parameters.
///
/// # Examples
///
/// ```rust,no_run
/// # #[macro_use] extern crate rocket;
/// use rocket::Request;
///
/// #[catch(500)]
/// fn internal_error() -> &'static str {
/// "Whoops! Looks like we messed up."
/// }
///
/// #[catch(400)]
/// fn not_found(req: &Request) -> String {
/// format!("I couldn't find '{}'. Try something else?", req.uri())
/// }
///
/// #[launch]
/// fn rocket() -> _ {
/// rocket::build().register("/", catchers![internal_error, not_found])
/// }
/// ```
pub fn register<'a, B, C>(self, base: B, catchers: C) -> Self
where B: TryInto<Origin<'a>> + Clone + fmt::Display,
B::Error: fmt::Display,
C: Into<Vec<Catcher>>
{
self.load("catcher", base, catchers.into(),
|base, catcher| catcher.map_base(|old| format!("{}{}", base, old)),
|r, catcher| r.0.catchers.push(catcher))
}
/// Add `state` to the state managed by this instance of Rocket.
///
/// This method can be called any number of times as long as each call
/// refers to a different `T`.
///
/// Managed state can be retrieved by any request handler via the
/// [`State`](crate::State) request guard. In particular, if a value of type `T`
/// is managed by Rocket, adding `State<T>` to the list of arguments in a
/// request handler instructs Rocket to retrieve the managed value.
///
/// # Panics
///
/// Panics if state of type `T` is already being managed.
///
/// # Example
///
/// ```rust,no_run
/// # #[macro_use] extern crate rocket;
/// use rocket::State;
///
/// struct MyInt(isize);
/// struct MyString(String);
///
/// #[get("/int")]
/// fn int(state: &State<MyInt>) -> String {
/// format!("The stateful int is: {}", state.0)
/// }
///
/// #[get("/string")]
/// fn string(state: &State<MyString>) -> &str {
/// &state.0
/// }
///
/// #[launch]
/// fn rocket() -> _ {
/// rocket::build()
/// .manage(MyInt(10))
/// .manage(MyString("Hello, managed state!".to_string()))
/// .mount("/", routes![int, string])
/// }
/// ```
pub fn manage<T>(self, state: T) -> Self
where T: Send + Sync + 'static
{
let type_name = std::any::type_name::<T>();
if !self.state.set(state) {
error!("state for type '{}' is already being managed", type_name);
panic!("aborting due to duplicately managed state");
}
self
}
/// Attaches a fairing to this instance of Rocket. No fairings are eagerly
    /// executed; fairings are executed at their appropriate time.
///
/// If the attached fairing is _fungible_ and a fairing of the same name
/// already exists, this fairing replaces it.
///
/// # Example
///
/// ```rust,no_run
/// # #[macro_use] extern crate rocket;
/// use rocket::Rocket;
/// use rocket::fairing::AdHoc;
///
/// #[launch]
/// fn rocket() -> _ {
/// rocket::build()
/// .attach(AdHoc::on_liftoff("Liftoff Message", |_| Box::pin(async {
/// println!("We have liftoff!");
/// })))
/// }
/// ```
pub fn attach<F: Fairing>(mut self, fairing: F) -> Self {
self.fairings.add(Box::new(fairing));
self
}
/// Returns a `Future` that transitions this instance of `Rocket` into the
/// _ignite_ phase.
///
/// When `await`ed, the future runs all _ignite_ fairings in serial,
/// [attach](Rocket::attach()) order, and verifies that `self` represents a
/// valid instance of `Rocket` ready for launch. This means that:
///
/// * All ignite fairings succeeded.
/// * A valid [`Config`] was extracted from [`Rocket::figment()`].
/// * If `secrets` are enabled, the extracted `Config` contains a safe
/// secret key.
/// * There are no [`Route#collisions`] or [`Catcher#collisions`]
/// collisions.
/// * No [`Sentinel`](crate::Sentinel) triggered an abort.
///
/// If any of these conditions fail to be met, a respective [`Error`] is
/// returned.
///
/// [configured]: Rocket::figment()
///
/// # Example
///
/// ```rust
/// use rocket::fairing::AdHoc;
///
/// #[rocket::main]
/// async fn main() -> Result<(), rocket::Error> {
/// let rocket = rocket::build()
/// # .configure(rocket::Config::debug_default())
/// .attach(AdHoc::on_ignite("Manage State", |rocket| async move {
/// rocket.manage(String::from("managed string"))
/// }));
///
/// // No fairings are run until ignition occurs.
/// assert!(rocket.state::<String>().is_none());
///
/// let rocket = rocket.ignite().await?;
/// assert_eq!(rocket.state::<String>().unwrap(), "managed string");
///
/// Ok(())
/// }
/// ```
    pub async fn ignite(mut self) -> Result<Rocket<Ignite>, Error> {
        // Run all ignite fairings, then fail if any of them errored.
        self = Fairings::handle_ignite(self).await;
        self.fairings.audit().map_err(|f| ErrorKind::FailedFairings(f.to_vec()))?;
        // Extract the configuration; initialize the logger.
        // (`mut` is only needed when the `secrets` feature is enabled.)
        #[allow(unused_mut)]
        let mut config = self.figment.extract::<Config>().map_err(ErrorKind::Config)?;
        crate::log::init(&config);
        // Check for safely configured secrets. Outside the debug profile, a
        // missing secret key is a hard error; in debug, generate one (falling
        // back to the zero key if generation fails).
        #[cfg(feature = "secrets")]
        if !config.secret_key.is_provided() {
            let profile = self.figment.profile();
            if profile != Config::DEBUG_PROFILE {
                return Err(Error::new(ErrorKind::InsecureSecretKey(profile.clone())));
            }
            if config.secret_key.is_zero() {
                config.secret_key = crate::config::SecretKey::generate()
                    .unwrap_or(crate::config::SecretKey::zero());
            }
        };
        // Initialize the router; check for collisions.
        let mut router = Router::new();
        self.routes.clone().into_iter().for_each(|r| router.add_route(r));
        self.catchers.clone().into_iter().for_each(|c| router.add_catcher(c));
        router.finalize().map_err(ErrorKind::Collisions)?;
        // Finally, freeze managed state.
        self.state.freeze();
        // Log everything we know: config, routes, catchers, fairings.
        // TODO: Store/print managed state type names?
        config.pretty_print(self.figment());
        log_items("🛰 ", "Routes", self.routes(), |r| &r.uri.base, |r| &r.uri);
        log_items("👾 ", "Catchers", self.catchers(), |c| &c.base, |c| &c.base);
        self.fairings.pretty_print();
        // Ignite the rocket: move all build-phase state into the ignite phase.
        let rocket: Rocket<Ignite> = Rocket(Igniting {
            router, config,
            shutdown: Shutdown(TripWire::new()),
            figment: self.0.figment,
            fairings: self.0.fairings,
            state: self.0.state,
        });
        // Query the sentinels, abort if requested.
        let sentinels = rocket.routes().flat_map(|r| r.sentinels.iter());
        sentinel::query(sentinels, &rocket).map_err(ErrorKind::SentinelAborts)?;
        Ok(rocket)
    }
}
/// Logs a titled list of launch items (routes or catchers) in a stable,
/// human-friendly order.
///
/// `e` is an emoji prefix, `t` the section title; `base` and `origin`
/// project each item to the URIs used as sort keys. The title line is only
/// printed when there is at least one item.
fn log_items<T, I, B, O>(e: &str, t: &str, items: I, base: B, origin: O)
    where T: fmt::Display + Copy, I: Iterator<Item = T>,
          B: Fn(&T) -> &Origin<'_>, O: Fn(&T) -> &Origin<'_>
{
    let mut items: Vec<_> = items.collect();
    if !items.is_empty() {
        launch_info!("{}{}:", Paint::emoji(e), Paint::magenta(t));
    }
    // `sort_by_key` is stable, so sorting least-significant key first yields
    // an ordering by: base segment count, then base length, then full-path
    // segment count, then full-path length.
    items.sort_by_key(|i| origin(i).path().as_str().chars().count());
    items.sort_by_key(|i| origin(i).path().segments().len());
    items.sort_by_key(|i| base(i).path().as_str().chars().count());
    items.sort_by_key(|i| base(i).path().segments().len());
    items.iter().for_each(|i| launch_info_!("{}", i));
}
impl Rocket<Ignite> {
/// Returns the finalized, active configuration. This is guaranteed to
/// remain stable through ignition and into orbit.
///
/// # Example
///
/// ```rust,no_run
/// #[rocket::main]
/// async fn main() -> Result<(), rocket::Error> {
/// let rocket = rocket::build().ignite().await?;
/// let config = rocket.config();
/// Ok(())
/// }
/// ```
pub fn config(&self) -> &Config {
&self.config
}
/// Returns a handle which can be used to trigger a shutdown and detect a
/// triggered shutdown.
///
/// A completed graceful shutdown resolves the future returned by
/// [`Rocket::launch()`]. If [`Shutdown::notify()`] is called _before_ an
/// instance is launched, it will be immediately shutdown after liftoff. See
/// [`Shutdown`] and [`config::Shutdown`](crate::config::Shutdown) for
/// details on graceful shutdown.
///
/// # Example
///
/// ```rust,no_run
/// # use std::time::Duration;
/// use rocket::tokio::{self, time};
///
/// #[rocket::main]
/// async fn main() -> Result<(), rocket::Error> {
/// let rocket = rocket::build().ignite().await?;
///
/// let shutdown = rocket.shutdown();
/// tokio::spawn(async move {
/// time::sleep(time::Duration::from_secs(5)).await;
/// shutdown.notify();
/// });
///
/// // The `launch()` future resolves after ~5 seconds.
/// let result = rocket.launch().await;
/// assert!(result.is_ok());
///
/// Ok(())
/// }
/// ```
pub fn shutdown(&self) -> Shutdown {
self.shutdown.clone()
}
fn into_orbit(self) -> Rocket<Orbit> {
Rocket(Orbiting {
router: self.0.router,
fairings: self.0.fairings,
figment: self.0.figment,
config: self.0.config,
state: self.0.state,
shutdown: self.0.shutdown,
})
}
async fn _local_launch(self) -> Rocket<Orbit> {
let rocket = self.into_orbit();
rocket.fairings.handle_liftoff(&rocket).await;
launch_info!("{}{}", Paint::emoji("🚀 "),
Paint::default("Rocket has launched into local orbit").bold());
rocket
}
async fn _launch(self) -> Result<(), Error> {
self.into_orbit().default_tcp_http_server(|rkt| Box::pin(async move {
rkt.fairings.handle_liftoff(&rkt).await;
let proto = rkt.config.tls_enabled().then(|| "https").unwrap_or("http");
let addr = format!("{}://{}:{}", proto, rkt.config.address, rkt.config.port);
launch_info!("{}{} {}",
Paint::emoji("🚀 "),
Paint::default("Rocket has launched from").bold(),
Paint::default(addr).bold().underline());
})).await
}
}
impl Rocket<Orbit> {
/// Returns the finalized, active configuration. This is guaranteed to
/// remain stable after [`Rocket::ignite()`], through ignition and into
/// orbit.
///
/// # Example
///
/// ```rust,no_run
/// # #[macro_use] extern crate rocket;
/// use rocket::fairing::AdHoc;
///
/// #[launch]
/// fn rocket() -> _ {
/// rocket::build()
/// .attach(AdHoc::on_liftoff("Config", |rocket| Box::pin(async move {
/// println!("Rocket launch config: {:?}", rocket.config());
/// })))
/// }
/// ```
pub fn config(&self) -> &Config {
&self.config
}
/// Returns a handle which can be used to trigger a shutdown and detect a
/// triggered shutdown.
///
/// A completed graceful shutdown resolves the future returned by
/// [`Rocket::launch()`]. See [`Shutdown`] and
/// [`config::Shutdown`](crate::config::Shutdown) for details on graceful
/// shutdown.
///
/// # Example
///
/// ```rust,no_run
/// # #[macro_use] extern crate rocket;
/// use rocket::tokio::{self, time};
/// use rocket::fairing::AdHoc;
///
/// #[launch]
/// fn rocket() -> _ {
/// rocket::build()
/// .attach(AdHoc::on_liftoff("Shutdown", |rocket| Box::pin(async move {
/// let shutdown = rocket.shutdown();
/// tokio::spawn(async move {
/// time::sleep(time::Duration::from_secs(5)).await;
/// shutdown.notify();
/// });
/// })))
/// }
/// ```
pub fn shutdown(&self) -> Shutdown {
self.shutdown.clone()
}
}
impl<P: Phase> Rocket<P> {
/// Returns an iterator over all of the routes mounted on this instance of
/// Rocket. The order is unspecified.
///
/// # Example
///
/// ```rust
/// # use rocket::*;
/// use rocket::Rocket;
/// use rocket::fairing::AdHoc;
///
/// #[get("/hello")]
/// fn hello() -> &'static str {
/// "Hello, world!"
/// }
///
/// let rocket = rocket::build()
/// .mount("/", routes![hello])
/// .mount("/hi", routes![hello]);
///
/// assert_eq!(rocket.routes().count(), 2);
/// assert!(rocket.routes().any(|r| r.uri == "/hello"));
/// assert!(rocket.routes().any(|r| r.uri == "/hi/hello"));
/// ```
pub fn routes(&self) -> impl Iterator<Item = &Route> {
match self.0.as_state_ref() {
StateRef::Build(p) => Either::Left(p.routes.iter()),
StateRef::Ignite(p) => Either::Right(p.router.routes()),
StateRef::Orbit(p) => Either::Right(p.router.routes()),
}
}
/// Returns an iterator over all of the catchers registered on this instance
/// of Rocket. The order is unspecified.
///
/// # Example
///
/// ```rust
/// # use rocket::*;
/// use rocket::Rocket;
/// use rocket::fairing::AdHoc;
///
/// #[catch(404)] fn not_found() -> &'static str { "Nothing here, sorry!" }
/// #[catch(500)] fn just_500() -> &'static str { "Whoops!?" }
/// #[catch(default)] fn some_default() -> &'static str { "Everything else." }
///
/// let rocket = rocket::build()
/// .register("/foo", catchers![not_found])
/// .register("/", catchers![just_500, some_default]);
///
/// assert_eq!(rocket.catchers().count(), 3);
/// assert!(rocket.catchers().any(|c| c.code == Some(404) && c.base == "/foo"));
/// assert!(rocket.catchers().any(|c| c.code == Some(500) && c.base == "/"));
/// assert!(rocket.catchers().any(|c| c.code == None && c.base == "/"));
/// ```
pub fn catchers(&self) -> impl Iterator<Item = &Catcher> {
match self.0.as_state_ref() {
StateRef::Build(p) => Either::Left(p.catchers.iter()),
StateRef::Ignite(p) => Either::Right(p.router.catchers()),
StateRef::Orbit(p) => Either::Right(p.router.catchers()),
}
}
/// Returns `Some` of the managed state value for the type `T` if it is
/// being managed by `self`. Otherwise, returns `None`.
///
/// # Example
///
/// ```rust
/// #[derive(PartialEq, Debug)]
/// struct MyState(&'static str);
///
/// let rocket = rocket::build().manage(MyState("hello!"));
/// assert_eq!(rocket.state::<MyState>().unwrap(), &MyState("hello!"));
/// ```
pub fn state<T: Send + Sync + 'static>(&self) -> Option<&T> {
match self.0.as_state_ref() {
StateRef::Build(p) => p.state.try_get(),
StateRef::Ignite(p) => p.state.try_get(),
StateRef::Orbit(p) => p.state.try_get(),
}
}
/// Returns the figment derived from the configuration provider set for
/// `self`. To extract a typed config, prefer to use
/// [`AdHoc::config()`](crate::fairing::AdHoc::config()).
///
/// # Example
///
/// ```rust
/// let rocket = rocket::build();
/// let figment = rocket.figment();
/// ```
pub fn figment(&self) -> &Figment {
match self.0.as_state_ref() {
StateRef::Build(p) => &p.figment,
StateRef::Ignite(p) => &p.figment,
StateRef::Orbit(p) => &p.figment,
}
}
pub(crate) async fn local_launch(self) -> Result<Rocket<Orbit>, Error> {
let rocket = match self.0.into_state() {
State::Build(s) => Rocket::from(s).ignite().await?._local_launch().await,
State::Ignite(s) => Rocket::from(s)._local_launch().await,
State::Orbit(s) => Rocket::from(s)
};
Ok(rocket)
}
/// Returns a `Future` that transitions this instance of `Rocket` from any
/// phase into the _orbit_ phase. When `await`ed, the future drives the
/// server forward, listening for and dispatching requests to mounted routes
/// and catchers.
///
/// In addition to all of the processes that occur during
/// [ignition](Rocket::ignite()), a successful launch results in _liftoff_
/// fairings being executed _after_ binding to any respective network
/// interfaces but before serving the first request. Liftoff fairings are
/// run concurrently; resolution of all fairings is `await`ed before
/// resuming request serving.
///
/// The `Future` resolves as an `Err` if any of the following occur:
///
/// * there is an error igniting; see [`Rocket::ignite()`].
/// * there is an I/O error starting the server.
/// * an unrecoverable, system-level error occurs while running.
///
/// The `Future` resolves as an `Ok` if any of the following occur:
///
/// * graceful shutdown via [`Shutdown::notify()`] completes.
///
/// The `Future` does not resolve otherwise.
///
/// # Error
///
/// If there is a problem starting the application, an [`Error`] is
/// returned. Note that a value of type `Error` panics if dropped without
/// first being inspected. See the [`Error`] documentation for more
/// information.
///
/// # Example
///
/// ```rust,no_run
/// #[rocket::main]
/// async fn main() {
/// let result = rocket::build().launch().await;
///
/// // this is reachable only after `Shutdown::notify()` or `Ctrl+C`.
/// println!("Rocket: deorbit.");
/// }
/// ```
pub async fn launch(self) -> Result<(), Error> {
match self.0.into_state() {
State::Build(s) => Rocket::from(s).ignite().await?._launch().await,
State::Ignite(s) => Rocket::from(s)._launch().await,
State::Orbit(_) => Ok(())
}
}
}
#[doc(hidden)]
impl<P: Phase> Deref for Rocket<P> {
type Target = P::State;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc(hidden)]
impl<P: Phase> DerefMut for Rocket<P> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<P: Phase> fmt::Debug for Rocket<P> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
| 34.378161 | 93 | 0.545923 |
dd6ac92e2b7ebcdf0c2243553a73fdbe692e4d16 | 264 | #[macro_use]
pub mod console;
pub mod time;
pub mod bootstrap;
pub mod lcpu;
pub mod irq;
pub mod spinlock;
//pub mod memory;
//
//pub mod thread;
mod intctrl;
mod constants;
// supervisor binary interface
mod sbi;
// registers
mod reg;
//mod mcause;
//mod mstauts;
| 11.478261 | 30 | 0.700758 |
dd27078d1036ea9c6cfc9a60ea9b93b5d5c69de9 | 4,814 | use pelite::pe::{Pe, PeView};
use winapi::um::consoleapi::AllocConsole;
use winapi::um::handleapi::CloseHandle;
use winapi::um::handleapi::INVALID_HANDLE_VALUE;
use winapi::um::memoryapi::ReadProcessMemory;
use winapi::um::processthreadsapi::ExitProcess;
use winapi::um::processthreadsapi::GetCurrentProcess;
use winapi::um::processthreadsapi::GetCurrentThreadId;
use winapi::um::processthreadsapi::OpenThread;
use winapi::um::processthreadsapi::SuspendThread;
use winapi::um::tlhelp32::CreateToolhelp32Snapshot;
use winapi::um::tlhelp32::Thread32First;
use winapi::um::tlhelp32::Thread32Next;
use winapi::um::tlhelp32::TH32CS_SNAPTHREAD;
use winapi::um::tlhelp32::THREADENTRY32;
use winapi::um::winbase::IsBadReadPtr;
use winapi::um::winnt::THREAD_ALL_ACCESS;
use winapi::um::{libloaderapi::GetModuleHandleA, processthreadsapi::GetCurrentProcessId};
/// Information about the current process's main module (the running executable),
/// gathered from its PE headers and version resource.
pub struct ModuleInfo {
    // Base address of the module in this process's address space.
    pub base: usize,
    // `ProductVersion` from the PE version resource, or "" if absent.
    pub version: String,
    // End of the code region relative to `base` (BaseOfCode + SizeOfCode).
    pub image_size: usize,
}
impl ModuleInfo {
    /// Build a `ModuleInfo` for the current executable by parsing its PE
    /// headers (via `pelite`) and extracting the `ProductVersion` string
    /// from the first translation entry of the version resource.
    ///
    /// # Panics
    /// Panics if the PE resource directory cannot be opened.
    pub fn create() -> Self {
        unsafe {
            // NULL module handle => handle of the process's own executable.
            let base = GetModuleHandleA(core::ptr::null()) as *const _ as usize;
            let module = PeView::module(base as *const _);
            let code_base = module.optional_header().BaseOfCode as usize;
            let code_size = module.optional_header().SizeOfCode as usize;
            // Offset just past the code section; used as the scan limit below.
            let image_size = code_base + code_size;
            let resources = module.resources().expect("Failed to open resources");
            let mut version = "".to_string();
            if let Ok(version_info) = resources.version_info() {
                if let Some(lang) = version_info.translation().get(0) {
                    if let Some(product_version) = version_info.value(*lang, "ProductVersion") {
                        // Resource strings are UTF-16 derived and may carry
                        // embedded NULs; strip them.
                        version = product_version.replace("\0", "").to_string();
                    }
                }
            }
            Self {
                base,
                version,
                image_size,
            }
        }
    }
    /// Scan the module's image memory page by page, from the highest page
    /// down to `base`, invoking `callback` on each readable window. Each
    /// window is the current 4 KiB page followed by the readable prefix of
    /// the previously scanned (next-higher) page, so byte patterns that
    /// straddle a page boundary can still be matched.
    ///
    /// Returns the first `Some` produced by `callback`, or `None` if the
    /// whole image was scanned without a hit.
    pub fn scan_memory<F, R>(&self, callback: F) -> Option<R>
    where
        F: Fn(&[u8]) -> Option<R>,
    {
        let mut remain = self.image_size as usize;
        let process = unsafe { GetCurrentProcess() };
        // Double-page buffer: [current page | overlap from previous page].
        let mut buffer = [0u8; 0x2000];
        let mut last_page_size = 0;
        loop {
            if remain == 0 {
                return None;
            }
            // The topmost (first-scanned) page may be partial; all others
            // are full 4 KiB pages.
            let page_size = if remain % 0x1000 != 0 {
                remain % 0x1000
            } else {
                0x1000
            };
            remain -= page_size;
            unsafe {
                let offset = (self.base + remain) as *const _;
                if IsBadReadPtr(offset, page_size) != 0 {
                    // Unreadable page: drop the overlap so we never hand the
                    // callback stale bytes across the gap.
                    last_page_size = 0;
                    continue;
                }
                // Shift last iteration's page into the overlap region before
                // overwriting the front of the buffer. Regions may overlap,
                // so use the memmove-safe `ptr::copy`.
                let copy_src = &buffer[0..last_page_size] as *const _ as *const u8;
                let copy_dst =
                    &mut buffer[page_size..page_size + last_page_size] as *mut _ as *mut u8;
                core::ptr::copy(copy_src, copy_dst, last_page_size);
                let read_dst = &mut buffer[0..page_size] as *mut _ as *mut _;
                if ReadProcessMemory(process, offset, read_dst, page_size, core::ptr::null_mut())
                    == 0
                {
                    last_page_size = 0;
                    continue;
                }
            }
            if let Some(result) = callback(&buffer[0..page_size + last_page_size]) {
                return Some(result);
            }
            // BUG FIX: previously `last_page_size` was never set after a
            // successful read, so the overlap copy above was always a
            // zero-byte no-op and cross-page patterns could never match.
            last_page_size = page_size;
        }
    }
}
/// Suspend every thread of the current process except the calling thread.
///
/// Takes a Tool Help snapshot of all threads in the system, walks it, and
/// suspends each entry owned by this process (other than the caller).
///
/// # Panics
/// Panics if the snapshot cannot be created or its first entry cannot be read.
pub fn pause_threads() {
    unsafe {
        let process = GetCurrentProcessId();
        let current_thread_id = GetCurrentThreadId();
        // TH32CS_SNAPTHREAD snapshots *all* threads system-wide; we filter by
        // th32OwnerProcessID below.
        let snapshot = CreateToolhelp32Snapshot(TH32CS_SNAPTHREAD, process);
        if snapshot == INVALID_HANDLE_VALUE {
            panic!("Snapshot invalid handle!");
        }
        let mut te32: THREADENTRY32 = core::mem::zeroed();
        // dwSize must be initialised or Thread32First fails.
        te32.dwSize = core::mem::size_of::<THREADENTRY32>() as u32;
        if Thread32First(snapshot, &mut te32) == 0 {
            CloseHandle(snapshot);
            panic!("Failed to iterate thread!");
        }
        loop {
            if te32.th32OwnerProcessID == process && te32.th32ThreadID != current_thread_id {
                let thread = OpenThread(THREAD_ALL_ACCESS, 0, te32.th32ThreadID);
                // BUG FIX: OpenThread returns NULL on failure, not
                // INVALID_HANDLE_VALUE, so the old check never fired. A
                // thread may also have exited since the snapshot was taken,
                // so skip unopenable threads instead of panicking.
                if !thread.is_null() {
                    SuspendThread(thread);
                    CloseHandle(thread);
                }
            }
            if Thread32Next(snapshot, &mut te32) == 0 {
                break;
            }
        }
        CloseHandle(snapshot);
    }
}
/// Attach a new console window to the current process (no-op if one exists).
pub fn alloc_console() {
    unsafe {
        AllocConsole();
    }
}
/// Terminate the current process immediately with the given exit code.
/// Does not run Rust destructors; this never returns.
pub fn exit_process(code: u32) {
    unsafe {
        ExitProcess(code);
    }
}
| 35.397059 | 97 | 0.55484 |
91c496ec1e608b6b811867d9a1de5582493bbc94 | 1,072 | use actix::prelude::*;
use std::any::TypeId;
use crate::broker::Broker;
use crate::msgs::*;
/// The `BrokerIssue` provides functions to issue messages to any subscribers.
pub trait BrokerIssue
where
    Self: Actor,
    <Self as Actor>::Context: AsyncContext<Self>,
{
    /// Asynchronously issue a message.
    // The issuing actor's TypeId is sent along so the broker knows who
    // issued the message (presumably to avoid echoing it back to the
    // issuer's own subscription — TODO confirm against Broker impl).
    fn issue_async<M: BrokerMsg>(&self, msg: M) {
        let broker = Broker::from_registry();
        broker.do_send(IssueAsync(msg, TypeId::of::<Self>()));
    }
    /// Synchronously issue a message.
    /// This also causes the broker to synchronously forward those messages on to any subscribers
    /// before handling any other messages.
    fn issue_sync<M: BrokerMsg>(&self, msg: M, ctx: &mut Self::Context) {
        let broker = Broker::from_registry();
        // `wait` blocks this actor's mailbox until the broker has processed
        // the IssueSync request; errors from the send are deliberately
        // ignored (mapped to `()`).
        broker
            .send(IssueSync(msg, TypeId::of::<Self>()))
            .into_actor(self)
            .map_err(|_, _, _| ())
            .map(|_, _, _| ())
            .wait(ctx);
    }
}
// Blanket impl: every actor with an async context gets `BrokerIssue` for free.
impl<A> BrokerIssue for A
where
    A: Actor,
    <A as Actor>::Context: AsyncContext<A>,
{
}
| 26.8 | 97 | 0.608209 |
ed989b0534f3dbbb51790f4307b618e994a78caf | 157 | extern crate rand;
use anyhow::Result;
mod cli;
mod crypto;
use crate::cli::cli as hexagon_cli;
/// Entry point: delegate to the CLI driver and report success.
fn main() -> Result<()> {
    hexagon_cli::run();
    Ok(())
}
| 13.083333 | 35 | 0.636943 |
03557484c2aa77035c1f37736e0e1a10b18c2d6a | 42,481 | //! Utility to convert an interface for the Sensel, represented
//! as a subset of SVG, to the Muses interface JSON format.
//!
//! Pass it an interface.svg file (format described in docs/interface.md)
//! For a pretty printed version of the JSON pipe through something like jsonpp.
//!
//!
//! Copyright: Benedict R. Gaster (2019)
extern crate svg;
extern crate usvg;
extern crate serde_json;
extern crate getopts;
use getopts::Options;
use std::env;
#[macro_use]
extern crate log;
extern crate env_logger;
extern crate picto;
use picto::color::*;
use serde_json::json;
mod path_convert;
mod stroke_convert;
use path_convert::convert_path;
use stroke_convert::convert_stroke;
use usvg::Color;
use lyon::tessellation::{
VertexBuffers, FillOptions, StrokeOptions, StrokeVertex, TessellationResult, LineJoin, LineCap};
use lyon::tessellation::basic_shapes::{fill_circle, fill_rectangle, stroke_polyline};
use lyon::math::{vector, point, Point};
use lyon::tessellation::geometry_builder::{BuffersBuilder};
use lyon::path::{Path};
use lyon::tessellation::{FillTessellator, StrokeTessellator};
use lyon::tessellation::geometry_builder::{GeometryBuilder};
use lyon::tessellation::geometry_builder::{VertexConstructor};
use lyon::tessellation;
use euclid::{Transform2D, TypedPoint2D, TypedPoint3D, TypedVector3D};
use svg::node::element::path::{Command, Data};
use svg::node::element::tag::{Path,Rectangle,Polygon,Circle};
use svg::node::{Value};
use svg::parser::Event;
use std::cmp::{max, min};
//-----------------------------------------------------------------------------
// constants
const SENSEL_WIDTH: u32 = 230;
const SENSEL_HEIGHT: u32 = 130;
const SENSEL_DEVICE: &'static str = "sensel";
const LIGHTPAD_WIDTH: u32 = 15;
const LIGHTPAD_HEIGHT: u32 = 15;
const LIGHTPAD_DEVICE: &'static str = "lightpad";
const CUSTOM_DEVICE: &'static str = "custom_device";
const UNSUPPORTED_DEVICE: &'static str = "unknown";
// SVG controller attributes
const INTERFACE_OSC_ADDRESS_ATTR: &'static str ="interface_osc_address";
const INTERFACE_TYPE_ATTR: &'static str = "interface_type";
const INTERFACE_OSC_ARGS_ATTR: &'static str = "interface_osc_args";
const INTERFACE_MIN_ATTR: &'static str = "min";
const INTERFACE_MAX_ATTR: &'static str = "max";
//-----------------------------------------------------------------------------
// tessellation and rasterization
/// A writable 2D pixel/cell target: `set` stores value `v` at column `x`,
/// row `y`. Implemented for both the PNG image buffer and the controller-id
/// `Interface` grid so the same rasterizer can drive either.
trait Buffer<T> {
    fn set(&mut self, x: i32, y: i32, v: &T);
}
// Rasterize to an image, used for creating visuals of the interface
// as we are working with SVGs this is not likely to be needed very often!
// Adapter: lets the rasterizer write RGB pixels into a picto image buffer.
// Coordinates are assumed already clipped to the buffer (see the bounding-box
// clamping in rasterize_triangle).
impl Buffer<picto::color::Rgb> for picto::buffer::Rgb {
    fn set(&mut self, x: i32, y: i32, v: &picto::color::Rgb) {
        self.set(x as u32, y as u32, v);
    }
}
type ID = u32;
/// The device interface model: a grid of controller ids plus an id allocator.
struct Interface {
    /// buffer representing the device surface, indexed `sensel[x][y]`;
    /// each cell holds the id of the controller covering it
    sensel: Vec<Vec<ID>>,
    /// next unique id to hand out; 0 is reserved for "no controller"
    next_id: ID,
}
impl Interface {
    /// Create a `width` x `height` interface grid with every cell set to the
    /// reserved "no controller" id 0; ids are handed out starting from 1.
    pub fn new(width: u32, height: u32) -> Self {
        let column = vec![0u32; height as usize];
        Self {
            sensel: vec![column; width as usize],
            next_id: 1,
        }
    }
    /// Allocate and return the next fresh controller id.
    pub fn id(&mut self) -> ID {
        let allocated = self.next_id;
        self.next_id += 1;
        allocated
    }
    /// Number of ids in use, counting the reserved id 0.
    pub fn number_ids(&self) -> u32 {
        self.next_id
    }
    /// Serialise the id grid as a JSON array of columns.
    pub fn to_json(&self) -> serde_json::Value {
        json!(self.sensel)
    }
}
// Adapter: lets the rasterizer stamp a controller id into the interface grid.
impl Buffer<u32> for Interface {
    fn set(&mut self, x: i32, y: i32, v: &u32) {
        self.sensel[x as usize][y as usize] = *v;
    }
}
// first we implement a simple Bary centric rasterizer (if you want to know more about this
// stuff you could check out the excellent introduction here:
// https://github.com/ssloy/tinyrenderer/wiki/Lesson-0:-getting-started).
// although it is important to note that we are not trying to implement an OpenGL rendering pipeline,
// we simply want to rasterise triangles.
// Integer screen-space point, 3D float vector, and a triangle as 3 points.
type Point2i = TypedPoint2D<i32, i32>;
type Vec3f = TypedVector3D<f32, f32>;
type Triangle = [Point2i;3];
// Shorthand constructor for an integer point.
fn point2i(x: i32, y: i32) -> Point2i {
    Point2i::new(x,y)
}
// Convert an integer point to lyon's float point type.
fn point2i_to_point(p: Point2i) -> TypedPoint2D<f32, euclid::UnknownUnit> {
    point(p.x as f32, p.y as f32)
}
/// Compute the barycentric coordinates of `p` with respect to the triangle
/// `points` (classic tinyrenderer construction: cross product of the two
/// edge/point difference vectors). A point is inside the triangle iff all
/// three returned components are non-negative.
fn barycentric(points: Triangle, p: Point2i) -> Vec3f {
    let u: Vec3f = Vec3f::new((points[2].x - points[0].x) as f32, (points[1].x-points[0].x) as f32, (points[0].x - p.x) as f32).cross(
        Vec3f::new((points[2].y - points[0].y) as f32, (points[1].y-points[0].y) as f32, (points[0].y - p.y) as f32));
    // check to see if degenerate, if so return something with negative coordinates
    // (|u.z| < 1 means zero integer area, i.e. a degenerate triangle)
    if u.z.abs() < 1.0 {
        Vec3f::new(-1.0, 1.0, 1.0)
    }
    else {
        Vec3f::new(1.0 - (u.x+u.y) / u.z, u.y/u.z, u.x/u.z)
    }
}
/// Legacy rasterizer hard-wired to a picto RGB buffer and black fill.
/// NOTE(review): appears unused — superseded by the generic
/// `rasterize_triangle` below; candidate for removal.
fn rasterize_triangle_old(
    width: u32, height: u32,
    points: Triangle,
    buffer: &mut picto::buffer::Rgb) {
    // calulate min/max bounding box, cliping to inside the sensel viewport
    let bound_box_max = Point2i::new(
        min(width as i32, max(points[0].x, max(points[1].x, points[2].x))),
        min(height as i32, max(points[0].y, max(points[1].y, points[2].y))));
    let bound_box_min = Point2i::new(
        max(0, min(points[0].x, min(points[1].x, points[2].x))),
        max(0, min(points[0].y, min(points[1].y, points[2].y))));
    // now iterate over the bounding box
    for x in bound_box_min.x..bound_box_max.x {
        for y in bound_box_min.y..bound_box_max.y {
            let bc_screen = barycentric(points, Point2i::new(x,y));
            // all barycentric components >= 0 means the pixel is inside
            if bc_screen.x >= 0.0 && bc_screen.y >= 0.0 && bc_screen.z >= 0.0 {
                buffer.set(x as u32, y as u32, &Rgb::new(0.0, 0.0, 0.0));
            }
        }
    }
}
/// Rasterize one triangle into any `Buffer<T>`, writing value `v` to every
/// covered cell. The triangle's bounding box is clipped to the
/// `width` x `height` viewport, then each cell is tested via barycentric
/// coordinates.
fn rasterize_triangle<T, B> (
    width: u32, height: u32,
    points: Triangle, buffer: &mut B, v: &T)
    where B : Buffer<T> {
    // calulate min/max bounding box, cliping to inside the sensel viewport
    let bound_box_max = Point2i::new(
        min(width as i32, max(points[0].x, max(points[1].x, points[2].x))),
        min(height as i32, max(points[0].y, max(points[1].y, points[2].y))));
    let bound_box_min = Point2i::new(
        max(0, min(points[0].x, min(points[1].x, points[2].x))),
        max(0, min(points[0].y, min(points[1].y, points[2].y))));
    // now iterate over the bounding box
    for x in bound_box_min.x..bound_box_max.x {
        for y in bound_box_min.y..bound_box_max.y {
            let bc_screen = barycentric(points, Point2i::new(x,y));
            // inside the triangle iff all barycentric components >= 0
            if bc_screen.x >= 0.0 && bc_screen.y >= 0.0 && bc_screen.z >= 0.0 {
                buffer.set(x, y, v);
            }
        }
    }
}
/// A tessellated mesh vertex: 2D position (z always 0) plus RGBA colour.
/// `repr(C)` keeps the layout stable for potential GPU upload.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct Vertex {
    pub position: [f32; 3],
    pub color: [f32; 4],
}
impl Vertex {
    /// Construct a vertex from a position and colour.
    pub fn new(position: [f32;3], colour: [f32;4]) -> Vertex {
        Vertex {
            position: position,
            color: colour,
        }
    }
    /// Truncate the float position to an integer screen point (drops z).
    pub fn to_point(&self) -> Point2i {
        point2i(self.position[0] as i32, self.position[1] as i32)
    }
}
// A simple vertex constructor: carries the colour and 2D transform applied
// to every vertex produced by the tessellators.
pub struct VertexCtor {
    pub color: [f32; 4],
    pub transform: Transform2D<f32>,
}
impl Default for VertexCtor {
    // Transparent black colour, identity transform.
    fn default() -> VertexCtor {
        VertexCtor {
            color : [0.0,0.0,0.0,0.0],
            transform: Transform2D::identity(),
        }
    }
}
// Turn a lyon fill-tessellation vertex into our mesh `Vertex`, applying the
// constructor's transform and colour.
impl VertexConstructor<tessellation::FillVertex, Vertex> for VertexCtor {
    fn new_vertex(&mut self, vertex: tessellation::FillVertex) -> Vertex {
        // Guard against NaNs escaping the tessellator.
        assert!(!vertex.position.x.is_nan());
        assert!(!vertex.position.y.is_nan());
        let tp = self.transform.transform_point(&vertex.position);
        let v = tp.to_array();
        Vertex {
            position: [v[0], v[1], 0.0 as f32],
            color: self.color,
        }
    }
}
// Turn a lyon stroke-tessellation vertex into our mesh `Vertex`, applying the
// constructor's transform and colour.
impl VertexConstructor<tessellation::StrokeVertex, Vertex> for VertexCtor {
    fn new_vertex(&mut self, vertex: tessellation::StrokeVertex) -> Vertex {
        let tp = self.transform.transform_point(&vertex.position);
        let v = tp.to_array();
        // (removed unused local `multi`, which was never read)
        Vertex {
            position: [v[0], v[1], 0.0 as f32],
            color: self.color,
        }
    }
}
pub type Mesh = VertexBuffers<Vertex, u16>;
pub const FALLBACK_COLOR: Color = Color {
red: 0,
green: 0,
blue: 0,
};
/// Errors that can occur while loading an input SVG.
#[derive(Debug)]
pub enum Error {
    /// Only `svg` and `svgz` suffixes are supported.
    InvalidFileSuffix,
    /// Failed to open the provided file.
    FileOpenFailed,
    /// Only UTF-8 content are supported.
    NotAnUtf8Str,
    /// Compressed SVG must use the GZip algorithm.
    MalformedGZip,
    /// The document's size/viewport is invalid.
    InvalidSize,
}
/// generates a set of verts and vert indices for a SVG path
// the implementation for this one is more compilated that the other shapes below, simply because it is a path
// and thus can contain arc, bezier curves, and so on. to avoid having to handle everything we use usvg to simpify
// first and then tesserlate the result.
fn path_tessellate(path: &str) -> Mesh {
    let mut mesh = Mesh::new();
    // create a trivial SVG with our path
    // TODO: probably a less overkill way to do this, i.e. without having to create a complete SVG, but in
    // truth this seems like a nice easy and functional path and so not sure it's worth it :-)
    let svg = format!(r#"<?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
    <svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" height="140mm" width="240mm"
         viewBox="0 0 240 140" version="1.11.1">
        <path d="{}" />
    </svg>"#, path);
    // use usvg to simplify our svg and Lyon to tesselate
    let mut fill_tess = FillTessellator::new();
    let mut stroke_tess = StrokeTessellator::new();
    let transform = Transform2D::identity();
    let rtree = usvg::Tree::from_str(&svg, &usvg::Options::default()).unwrap();
    // walk the simplified tree; fills and strokes are tessellated into the
    // same mesh (colour is irrelevant here — only coverage matters)
    for node in rtree.root().descendants() {
        if let usvg::NodeKind::Path(ref p) = *node.borrow() {
            if let Some(ref fill) = p.fill {
                fill_tess.tessellate_path(
                    convert_path(p),
                    &FillOptions::tolerance(0.01),
                    &mut BuffersBuilder::new(
                        &mut mesh,
                        VertexCtor {
                            color : [0.0,0.0,0.0,0.0],
                            transform
                        }
                    ),
                ).expect("Error during tesselation!");
            }
            if let Some(ref stroke) = p.stroke {
                // stroke colour is ignored; only the stroke options are used
                let (stroke_color, stroke_opts) = convert_stroke(stroke);
                let _ = stroke_tess.tessellate_path(
                    convert_path(p),
                    &stroke_opts.with_tolerance(0.01).with_line_width(1.0),
                    &mut BuffersBuilder::new(
                        &mut mesh,
                        VertexCtor {
                            color : [0.0,0.0,0.0,0.0],
                            transform
                        },
                    ),
                );
            }
        }
    }
    mesh
}
/// generate a set of verts and vert indices for a SVG rect
/// (this will, of course, generate, just two triangles)
fn rect_tessellate(x: i32, y: i32, width: i32, height: i32) -> Mesh {
    let mut mesh = Mesh::new();
    let fill_options = FillOptions::tolerance(0.01);
    let transform = Transform2D::identity();
    // tessellate onto the mesh
    fill_rectangle(
        &lyon::math::rect(x as f32, y as f32, width as f32, height as f32),
        &fill_options,
        &mut BuffersBuilder::new(
            &mut mesh,
            VertexCtor {
                color : [0.0,0.0,0.0,0.0],
                transform
            })
    ).unwrap();
    mesh
}
/// Generate a triangle mesh approximating a filled circle with centre
/// `cpoint` and radius `r` (tolerance 0.01).
fn circle_tessellate(cpoint: Point2i, r: i32) -> Mesh {
    let mut mesh = Mesh::new();
    let fill_options = FillOptions::tolerance(0.01);
    let transform = Transform2D::identity();
    // tessellate onto the mesh
    fill_circle(
        point2i_to_point(cpoint),
        r as f32,
        &fill_options,
        &mut BuffersBuilder::new(
            &mut mesh,
            VertexCtor{
                color : [0.0,0.0,0.0,0.0],
                transform
            } ),
    ).unwrap();
    mesh
}
/// generate a set of verts and vert indices for a SVG polygon
/// (built as a lyon path from the point list, then fill-tessellated)
fn polygon_tessellate(points: Vec<Point2i>) -> Mesh {
    let mut mesh = Mesh::new();
    let fill_options = FillOptions::tolerance(0.01);
    let mut tessellator_fill = FillTessellator::new();
    let transform = Transform2D::identity();
    // build a path through all the polygon's points
    let mut builder = Path::builder();
    builder.move_to(point2i_to_point(points[0]));
    // TODO might what add check that it is actually closed, it should be it is a SVG polygon
    for i in 1..points.len() {
        builder.line_to(point2i_to_point(points[i]));
    }
    let path = builder.build();
    tessellator_fill.tessellate_path(
        &path,
        &fill_options,
        &mut BuffersBuilder::new(
            &mut mesh,
            VertexCtor{
                color : [0.0,0.0,0.0,0.0],
                transform: transform,
            }),
    ).unwrap();
    mesh
}
/// rasterize a mesh to a given buffer
/// (walks the index buffer three indices at a time, rasterizing each
/// triangle with value `v`)
fn rasterize<T,B>(
    width: u32, height: u32,
    mesh: &Mesh, buffer: &mut B, v: &T)
    where B : Buffer<T> {
    for indices in mesh.indices.chunks(3) {
        let points = [
            mesh.vertices[indices[0] as usize].to_point(),
            mesh.vertices[indices[1] as usize].to_point(),
            mesh.vertices[indices[2] as usize].to_point()];
        rasterize_triangle(width, height, points, buffer, v);
    }
}
//-----------------------------------------------------------------------------
/// convert pixels to mm
/// assumption is that DPI = 72
/// Convert a length in pixels to millimetres, assuming 72 DPI
/// (1 px = 25.4 mm / 72 ≈ 0.3528 mm).
fn px_to_mm(px: f64) -> f64 {
    // Use the exact ratio instead of a hand-truncated decimal constant.
    px * (25.4 / 72.0)
}
/// convert types to JSON names
/// Map an SVG controller-type tag to its JSON `type_id` name.
///
/// Known controller tags map to themselves; `"border"` is folded into
/// `"none"` (it only marks a printable cut-out outline); anything else is
/// logged and mapped to the empty string.
fn handle_type(tag: &str) -> &str {
    match tag {
        "noteon" | "noteoff" | "pad" | "endless" | "vert_slider" | "horz_slider" => tag,
        "none" | "border" => "none",
        _ => {
            warn!("unreconized type {}", tag);
            ""
        }
    }
}
/// Parse an SVG numeric attribute (int or float) and round it to an `i32`.
/// When `px` is true the value is taken to be in pixels and converted to mm
/// first (rough 72-DPI maths — see `px_to_mm`; would need tightening for a
/// more robust implementation).
fn handle_int_attribute(v: &str, px: bool) -> i32 {
    let parsed = v.parse::<f64>().unwrap();
    let value = if px { px_to_mm(parsed) } else { parsed };
    value.round() as i32
}
/// Convert one OSC argument string into a JSON value.
/// Only integer arguments are supported; anything else is logged and
/// mapped to `Null` so the caller can filter it out.
fn handle_argument(v: &str) -> serde_json::Value {
    if let Ok(parsed) = v.parse::<i32>() {
        json!(parsed)
    } else {
        warn!("Unsupported argument type");
        serde_json::Value::Null
    }
}
fn interface_device(d: &str) -> (u32, u32, &str) {
match d {
SENSEL_DEVICE => (SENSEL_WIDTH, SENSEL_HEIGHT, SENSEL_DEVICE),
LIGHTPAD_DEVICE => (LIGHTPAD_WIDTH, LIGHTPAD_HEIGHT, LIGHTPAD_DEVICE),
_ => {
error!("UNKNOWN interface type");
(0,0, "unknown")
}
}
}
//-----------------------------------------------------------------------------
// command line utilities
/// Print the getopts-generated usage text for this program to stdout.
fn print_usage(program: &str, opts: Options) {
    let brief = format!("Usage: {} FILE [options]", program);
    print!("{}", opts.usage(&brief));
}
//-----------------------------------------------------------------------------
// entry point
fn main() {
    // handle command line arguments
    let args: Vec<String> = env::args().collect();
    let program = args[0].clone();
    let mut opts = Options::new();
    opts.optopt("", "png", "generate PNG output", "NAME");
    opts.optopt("", "json", "JSON file output", "NAME");
    opts.optflag("", "illustrator", "SVG used Adobe Illustrator attribute encoding");
    opts.optflag("h", "help", "print this help menu");
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => { m }
        Err(f) => { panic!(f.to_string()) }
    };
    // display help and exit if necessary
    if matches.opt_present("h") {
        print_usage(&program, opts);
        return;
    }
    // input was exported from Adobe Illistrator, so utilises a different attribute encoding
    let illustrator = matches.opt_present("illustrator");
    // PNG path or ""
    let png_path = match matches.opt_str("png") {
        Some(x) => x,
        None => "".to_string(),
    };
    // must be an interface.svg
    let svg_path = if !matches.free.is_empty() {
        matches.free[0].clone()
    } else {
        print_usage(&program, opts);
        return;
    };
    let mut controllers = vec![];
    let mut device_width = 0;
    let mut device_height = 0;
    let mut device_name: String = UNSUPPORTED_DEVICE.to_string();
    let doc = svg::open(svg_path.clone()).unwrap();
    // First pass over the SVG: read the `interface_device` attribute on the
    // root <svg> element to determine the target device and its dimensions.
    for event in doc {
        match event {
            Event::Tag(SVG, _, attributes) => {
                match attributes.get("interface_device") {
                    Some(attr) => {
                        //Checks for the special case that a custom device with dimensions read from the SVG itself.
                        if attr.to_string() == CUSTOM_DEVICE {
                                device_width = attributes.get("width").unwrap().parse().unwrap();
                                device_height = attributes.get("height").unwrap().parse().unwrap();
                            device_name = CUSTOM_DEVICE.to_string();
                        }
                        else {
                            let (w,h,n) = interface_device(attr);
                            device_width = w;
                            device_height = h;
                            device_name = n.to_string();
                        }
                    },
                    _ => {
                    }
                }
            },
            _ => {}
        };
    }
    // check if successfully found a valid interface device
    if device_name == UNSUPPORTED_DEVICE.to_string() {
        eprintln!("Unsupported Interface type");
        return;
    }
    // create an image mapping to the sensel, with white background
    let mut image = picto::Buffer::<Rgb, u8, _>::from_pixel(
        device_width,
        device_height,
        &Rgb::new(1.0, 1.0, 1.0));
    let mut interface = Interface::new(device_width, device_height);
    let doc = svg::open(svg_path).unwrap();
    // Second pass: tessellate + rasterize each shape into the id buffer (and
    // optional PNG), and collect its controller description as JSON. For each
    // shape kind the `illustrator` flag selects between Illustrator's packed
    // "data-name" encoding and the explicit interface_* attributes.
    for event in doc {
        match event {
            Event::Tag(Path, _, attributes) => {
                        let data = attributes.get("d").unwrap();
                let mesh = path_tessellate(data);
                // rasterize to PNG, if requested
                if png_path.len() > 0 {
                    rasterize(device_width, device_height, &mesh, &mut image, &Rgb::new(0.0, 0.0, 0.0));
                }
                // add to interface buffer
                let id = interface.id();
                rasterize(device_width, device_height, &mesh, &mut interface, &id);
                if illustrator {
                    // now handle the data-name attribute, which contains info about the type
                    // of controller, OSC message, and so on.
                    // TODO: add some error checking, to avoid "seg-faulting" :-)
                    let data_name = attributes.get("data-name").unwrap();
                    let need_to_live = (&*data_name).to_string();
                    let mut args : Vec<&str> = need_to_live.split(' ').collect();
                    let typ = handle_type(args[1]);
                    if typ != "none" {
                        // osc address
                        let msg = args[2];
                        let mut args_json = vec![];
                        if args.len() > 3 {
                            args.drain(0..3);
                            // only osc arguments left
                            for a in args {
                                let arg = handle_argument(a);
                                if arg != serde_json::Value::Null {
                                    args_json.push(arg);
                                }
                            }
                        }
                        let path = json!({
                            "id": id,
                            "type_id": typ,
                            "args" : args_json,
                            "address" : msg
                        });
                        controllers.push(path);
                    }
                }
                else {
                    let typ = handle_type(attributes.get(INTERFACE_TYPE_ATTR).unwrap());
                    if typ != "none" {
                        let msg = attributes.get(INTERFACE_OSC_ADDRESS_ATTR).unwrap() as &str;
                        let mut args_json = vec![];
                        match attributes.get(INTERFACE_OSC_ARGS_ATTR) {
                            Some(osc_args) => {
                                let need_to_live = (&*osc_args).to_string();
                                let mut args : Vec<&str> = need_to_live.split(' ').collect();
                                for a in args {
                                    let arg = handle_argument(a);
                                    if arg != serde_json::Value::Null {
                                        args_json.push(arg);
                                    }
                                }
                            },
                            _ => {
                            }
                        }
                        let path = json!({
                            "id": id,
                            "type_id": typ,
                            "args" : args_json,
                            "address" : msg
                        });
                        controllers.push(path);
                    }
                }
            },
            Event::Tag(Rectangle, _, attributes) => {
                let x = handle_int_attribute(attributes.get("x").unwrap(), illustrator);
                let y = handle_int_attribute(attributes.get("y").unwrap(), illustrator);
                let width = handle_int_attribute(attributes.get("width").unwrap(), illustrator);
                let height = handle_int_attribute(attributes.get("height").unwrap(), illustrator);
                if illustrator {
                    // now handle the data-name attribute, which contains info about the type
                    // of controller, OSC message, and so on.
                    // TODO: add some error checking, to avoid "seg-faulting" :-)
                    let data_name = attributes.get("data-name").unwrap();
                    let need_to_live = (&*data_name).to_string();
                    let mut args : Vec<&str> = need_to_live.split(' ').collect();
                    // controller type
                    let typ = handle_type(args[1]);
                    // none is a special case used for printing border for cutout, not included in interface
                    if typ != "none" {
                        let mesh = rect_tessellate(x,y, width, height);
                        // rasterize to PNG, if requested
                        if png_path.len() > 0 {
                            rasterize(device_width, device_height, &mesh, &mut image, &Rgb::new(0.0, 0.0, 0.0));
                        }
                        // add to interface buffer
                        let id = interface.id();
                        rasterize(device_width, device_height, &mesh, &mut interface, &id);
                        // osc address
                        let msg = args[2];
                        let mut args_json = vec![];
                        if args.len() > 3 {
                            args.drain(0..3);
                            // only osc arguments left
                            for a in args {
                                let arg = handle_argument(a);
                                if arg != serde_json::Value::Null {
                                    args_json.push(arg);
                                }
                            }
                        }
                        let rect = json!({
                            "id": id,
                            "type_id": typ,
                            "args" : args_json,
                            "address" : msg
                        });
                        controllers.push(rect);
                    }
                }
                else {
                    let typ = handle_type(attributes.get(INTERFACE_TYPE_ATTR).unwrap());
                    // none is a special case used for printing border for cutout, not included in interface
                    if typ != "none" {
                        let mesh = rect_tessellate(x,y, width, height);
                        // rasterize to PNG, if requested
                        if png_path.len() > 0 {
                            rasterize(device_width, device_height, &mesh, &mut image, &Rgb::new(0.0, 0.0, 0.0));
                        }
                        // add to interface buffer
                        let id = interface.id();
                        rasterize(device_width, device_height, &mesh, &mut interface, &id);
                        let msg = attributes.get(INTERFACE_OSC_ADDRESS_ATTR).unwrap() as &str;
                        let mut args_json = vec![];
                        match attributes.get(INTERFACE_OSC_ARGS_ATTR) {
                            Some(osc_args) => {
                                let need_to_live = (&*osc_args).to_string();
                                let mut args : Vec<&str> = need_to_live.split(' ').collect();
                                for a in args {
                                    let arg = handle_argument(a);
                                    if arg != serde_json::Value::Null {
                                        args_json.push(arg);
                                    }
                                }
                            },
                            _ => {
                            }
                        }
                        // optional "on"/"off" pair: both present (and typ == "pad")
                        // turns the pad into a "dpad" below
                        let mut args_onoff = vec![];
                        match attributes.get("on") {
                            Some(value) => {
                                let arg = handle_argument(value);
                                if arg != serde_json::Value::Null {
                                    args_onoff.push(arg);
                                }
                                match attributes.get("off") {
                                    Some(value) => {
                                        let arg = handle_argument(value);
                                        if arg != serde_json::Value::Null {
                                            args_onoff.push(arg);
                                        }
                                    },
                                    _ => {
                                    }
                                };
                            },
                            _ => {
                            }
                        };
                        let rgb = match attributes.get("fill") {
                            Some(fill) => {
                                fill
                            },
                            _ => {
                                "rgb(0,0,0)"
                            }
                        };
                        let pressure = match attributes.get("pressure") {
                            Some(pressure) => {
                                pressure.to_string() == "True"
                            },
                            _ => {
                                false
                            }
                        };
                        let with_coords = match attributes.get("with_coords") {
                            Some(with_coords) => {
                                with_coords.to_string() == "True"
                            },
                            _ => {
                                false
                            }
                        };
                        if typ == "vert_slider" || typ == "horz_slider" {
                            let min = (attributes.get(INTERFACE_MIN_ATTR).unwrap() as &str).parse::<u32>().unwrap();
                            let max = (attributes.get(INTERFACE_MAX_ATTR).unwrap() as &str).parse::<u32>().unwrap();
                            let rect = json!({
                                "id": id,
                                "type_id": typ,
                                "args" : args_json,
                                "address" : msg,
                                "min" : min,
                                "max" : max,
                                "rgb" : rgb,
                                "pressure": pressure,
                                "generate_coords": with_coords,
                            });
                            controllers.push(rect);
                        }
                        else {
                            let generate_move = match attributes.get("with_move") {
                                Some(with_move) => {
                                    with_move.to_string() == "True"
                                },
                                _ => {
                                    false
                                }
                            };
                            let generate_end = match attributes.get("with_end") {
                                Some(with_end) => {
                                    with_end.to_string() == "True"
                                },
                                _ => {
                                    false
                                }
                            };
                            if args_onoff.len() == 2 && typ == "pad" {
                                let rect = json!({
                                    "id": id,
                                    "type_id": "dpad",
                                    "args" : args_json,
                                    "address" : msg,
                                    "rgb": rgb,
                                    "pressure": pressure,
                                    "generate_move": generate_move,
                                    "generate_end": generate_end,
                                    "generate_coords": with_coords,
                                    "on": args_onoff[0],
                                    "off": args_onoff[1],
                                });
                                controllers.push(rect);
                            }
                            else {
                                let rect = json!({
                                    "id": id,
                                    "type_id": typ,
                                    "args" : args_json,
                                    "address" : msg,
                                    "rgb": rgb,
                                    "pressure": pressure,
                                    "generate_move": generate_move,
                                    "generate_end": generate_end,
                                    "generate_coords": with_coords,
                                });
                                controllers.push(rect);
                            }
                        }
                    }
                }
            },
            Event::Tag(Circle, _, attributes) => {
                let cpoint = point2i(
                    handle_int_attribute(attributes.get("cx").unwrap(), illustrator),
                    handle_int_attribute(attributes.get("cy").unwrap(), illustrator));
                let r = handle_int_attribute(attributes.get("r").unwrap(), illustrator);
                let mesh = circle_tessellate(cpoint,r);
                // rasterize to PNG, if requested
                if png_path.len() > 0 {
                    rasterize(device_width, device_height, &mesh, &mut image, &Rgb::new(0.0, 0.0, 0.0));
                }
                // add to interface buffer
                let id = interface.id();
                rasterize(device_width, device_height, &mesh, &mut interface, &id);
                if illustrator {
                    // now handle the data-name attribute, which contains info about the type
                    // of controller, OSC message, and so on.
                    // TODO: add some error checking, to avoid "seg-faulting" :-)
                    let data_name = attributes.get("data-name").unwrap();
                    let need_to_live = (&*data_name).to_string();
                    let mut args : Vec<&str> = need_to_live.split(' ').collect();
                    let typ = handle_type(args[1]);
                    if typ != "none" {
                        // osc address
                        let msg = args[2];
                        let mut args_json = vec![];
                        if args.len() > 3 {
                            args.drain(0..3);
                            // only osc arguments left
                            for a in args {
                                let arg = handle_argument(a);
                                if arg != serde_json::Value::Null {
                                    args_json.push(arg);
                                }
                            }
                        }
                        let circle = json!({
                            "id": id,
                            "type_id": typ,
                            "args" : args_json,
                            "address" : msg
                        });
                        controllers.push(circle);
                    }
                }
                else {
                    let typ = handle_type(attributes.get(INTERFACE_TYPE_ATTR).unwrap());
                    // none is a special case used for printing border for cutout, not included in interface
                    if typ != "none" {
                        let msg = attributes.get(INTERFACE_OSC_ADDRESS_ATTR).unwrap() as &str;
                        let mut args_json = vec![];
                        match attributes.get(INTERFACE_OSC_ARGS_ATTR) {
                            Some(osc_args) => {
                                let need_to_live = (&*osc_args).to_string();
                                let mut args : Vec<&str> = need_to_live.split(' ').collect();
                                for a in args {
                                    let arg = handle_argument(a);
                                    if arg != serde_json::Value::Null {
                                        args_json.push(arg);
                                    }
                                }
                            },
                            _ => {
                            }
                        }
                        let mut args_onoff = vec![];
                        match attributes.get("on") {
                            Some(value) => {
                                let arg = handle_argument(value);
                                if arg != serde_json::Value::Null {
                                    args_onoff.push(arg);
                                }
                                match attributes.get("off") {
                                    Some(value) => {
                                        let arg = handle_argument(value);
                                        if arg != serde_json::Value::Null {
                                            args_onoff.push(arg);
                                        }
                                    },
                                    _ => {
                                    }
                                };
                            },
                            _ => {
                            }
                        };
                        // TODO: all this code refactoring...
                        if args_onoff.len() == 2 && typ == "pad" {
                            let circle = json!({
                                "id": id,
                                "type_id": "dpad",
                                "args" : args_json,
                                "address" : msg,
                                "on": args_onoff[0],
                                "off": args_onoff[1],
                            });
                            controllers.push(circle);
                        }
                        else {
                            let circle = json!({
                                "id": id,
                                "type_id": typ,
                                "args" : args_json,
                                "address" : msg
                            });
                            controllers.push(circle);
                        }
                    }
                }
            },
            Event::Tag(Polygon, _, attributes) => {
                // convert each elem of points to i32
                let values: Vec<i32> = attributes
                    .get("points")
                    .unwrap()
                    .split(' ')
                    .map(|s| handle_int_attribute(s, illustrator))
                    .collect();
                // pair them up into points
                let points: Vec<Point2i> = values
                    .chunks(2)
                    .map(|p| point2i(p[0], p[1]))
                    .collect();
                let mesh = polygon_tessellate(points);
                // rasterize to PNG, if requested
                if png_path.len() > 0 {
                    rasterize(device_width, device_height, &mesh, &mut image, &Rgb::new(0.0, 0.0, 0.0));
                }
                // add to interface buffer
                let id = interface.id();
                rasterize(device_width, device_height, &mesh, &mut interface, &id);
                if illustrator {
                    // now handle the data-name attribute, which contains info about the type
                    // of controller, OSC message, and so on.
                    // TODO: add some error checking, to avoid "seg-faulting" :-)
                    let data_name = attributes.get("data-name").unwrap();
                    let need_to_live = (&*data_name).to_string();
                    let mut args : Vec<&str> = need_to_live.split(' ').collect();
                    let typ = handle_type(args[1]);
                    // osc address
                    let msg = args[2];
                    let mut args_json = vec![];
                    if args.len() > 3 {
                        args.drain(0..3);
                        // only osc arguments left
                        for a in args {
                            let arg = handle_argument(a);
                            if arg != serde_json::Value::Null {
                                args_json.push(arg);
                            }
                        }
                    }
                    let poly = json!({
                        "id": id,
                        "type_id": typ,
                        "args" : args_json,
                        "address" : msg
                    });
                    controllers.push(poly);
                }
                else {
                    let typ = handle_type(attributes.get(INTERFACE_TYPE_ATTR).unwrap());
                    // none is a special case used for printing border for cutout, not included in interface
                    let msg = attributes.get(INTERFACE_OSC_ADDRESS_ATTR).unwrap() as &str;
                    let mut args_json = vec![];
                    match attributes.get(INTERFACE_OSC_ARGS_ATTR) {
                        Some(osc_args) => {
                            let need_to_live = (&*osc_args).to_string();
                            let mut args : Vec<&str> = need_to_live.split(' ').collect();
                            for a in args {
                                let arg = handle_argument(a);
                                if arg != serde_json::Value::Null {
                                    args_json.push(arg);
                                }
                            }
                        },
                        _ => {
                        }
                    }
                    let poly = json!({
                        "id": id,
                        "type_id": typ,
                        "args" : args_json,
                        "address" : msg
                    });
                    controllers.push(poly);
                }
            },
            _ => {}
        }
    }
    // Emit the complete interface description (controllers + id buffer +
    // device name) as JSON on stdout.
    let interface_json = json!({
        "controllers": controllers,
        "buffer": interface.to_json(),
        "interface": device_name,
    });
    println!("{}", interface_json.to_string());
    // write PNG image of sensel interface
    if png_path.len() > 0 {
        picto::write::to_path(png_path, &image).unwrap();
    }
}
| 36.246587 | 134 | 0.427603 |
38723587ed7c649594bb38f758d9cae161cc137d | 1,560 | use idea_discussion_master::{
commands::{
add_agenda::*, add_github_issue::*, end_discussion::*, end_votes::*, help::*,
show_agendas::*, start_discussion::*, start_votes::*,
},
globals::{agendas::Agendas, record_id::RecordId, voice_chat_channel_id::VoiceChatChannelId},
listeners::{self, after_commands, before_commands},
utils,
};
use serenity::{
framework::{standard::macros::group, StandardFramework},
prelude::Client,
};
use std::{collections::HashMap, sync::Arc};
use tokio::sync::RwLock;
#[group]
#[only_in(guilds)]
#[commands(
start_discussion,
end_discussion,
start_votes,
end_votes,
add_agenda,
show_agendas,
add_github_issue
)]
struct General;
#[tokio::main]
/// Entry point: configure the serenity command framework, build the Discord
/// client, seed its shared data map, and run until the client stops.
async fn main() {
    // Commands are prefixed with "\"; before/after hooks wrap every command.
    let framework = StandardFramework::new()
        .configure(|config| config.prefix("\\"))
        .after(after_commands)
        .before(before_commands)
        .group(&GENERAL_GROUP)
        .help(&MY_HELP);
    let mut client = Client::builder(&utils::Env::new().discord_token)
        .framework(framework)
        .event_handler(listeners::Handler)
        .await
        .expect("クライアントの作成中にエラーが発生しました");
    {
        // Initialise the client's shared state (scoped so the write lock is
        // released before the client starts).
        let mut data = client.data.write().await;
        data.insert::<RecordId>(Arc::new(RwLock::new(None)));
        data.insert::<Agendas>(Arc::new(RwLock::new(HashMap::default())));
        data.insert::<VoiceChatChannelId>(Arc::new(RwLock::new(None)));
    }
    if let Err(reason) = client.start().await {
        eprintln!("クライアントの起動に失敗しました: {:?}", reason);
    }
}
| 27.368421 | 96 | 0.634615 |
1859359c4f0badc00d7254d6915d290cfaf97522 | 3,128 | #[cfg(test)]
use mocktopus::macros::mockable;
#[cfg_attr(test, mockable)]
pub(crate) mod collateral {
    //! Thin wrappers that delegate collateral-currency operations to the
    //! `ParachainCurrency` implementation selected by `T::Collateral`.
    //! Mockable in unit tests via `mocktopus`.
    use crate::types::Collateral;
    use currency::ParachainCurrency;
    use frame_support::dispatch::DispatchResult;
    /// Transfer `amount` of collateral from `source` to `destination`
    /// (delegates to `T::Collateral::transfer`).
    pub fn transfer<T: crate::Config>(
        source: &T::AccountId,
        destination: &T::AccountId,
        amount: Collateral<T>,
    ) -> DispatchResult {
        T::Collateral::transfer(source, destination, amount)
    }
    /// Lock `amount` of `sender`'s collateral (delegates to `T::Collateral::lock`).
    pub fn lock<T: crate::Config>(sender: &T::AccountId, amount: Collateral<T>) -> DispatchResult {
        T::Collateral::lock(sender, amount)
    }
    /// Unlock `amount` of `sender`'s collateral (delegates to `T::Collateral::unlock`).
    pub fn unlock<T: crate::Config>(sender: &T::AccountId, amount: Collateral<T>) -> DispatchResult {
        T::Collateral::unlock(sender, amount)
    }
    /// Reserved (locked) collateral balance of account `id`.
    pub fn get_reserved_balance<T: crate::Config>(id: &T::AccountId) -> Collateral<T> {
        T::Collateral::get_reserved_balance(id)
    }
    /// Free (unlocked) collateral balance of account `id`.
    pub fn get_free_balance<T: crate::Config>(id: &T::AccountId) -> Collateral<T> {
        T::Collateral::get_free_balance(id)
    }
}
#[cfg_attr(test, mockable)]
pub(crate) mod treasury {
    //! Wrapper over the wrapped-currency implementation (`T::Wrapped`).
    use crate::types::Wrapped;
    use currency::ParachainCurrency;
    /// Total supply of the wrapped currency
    /// (delegates to `T::Wrapped::get_total_supply`).
    pub fn total_issued<T: crate::Config>() -> Wrapped<T> {
        T::Wrapped::get_total_supply()
    }
}
#[cfg_attr(test, mockable)]
pub(crate) mod oracle {
    //! Conversions between wrapped and collateral amounts via the
    //! `exchange_rate_oracle` pallet.
    use crate::types::{Collateral, Wrapped};
    use frame_support::dispatch::DispatchError;
    /// Convert a wrapped-currency `amount` to its collateral equivalent
    /// using the current oracle exchange rate.
    pub fn wrapped_to_collateral<T: crate::Config>(amount: Wrapped<T>) -> Result<Collateral<T>, DispatchError> {
        <exchange_rate_oracle::Pallet<T>>::wrapped_to_collateral(amount)
    }
    /// Convert a collateral `amount` to its wrapped-currency equivalent
    /// using the current oracle exchange rate.
    pub fn collateral_to_wrapped<T: crate::Config>(amount: Collateral<T>) -> Result<Wrapped<T>, DispatchError> {
        <exchange_rate_oracle::Pallet<T>>::collateral_to_wrapped(amount)
    }
}
#[cfg_attr(test, mockable)]
pub(crate) mod security {
    //! Wrappers over the `security` pallet.
    use frame_support::dispatch::DispatchResult;
    /// Errors unless the parachain is not in the `Shutdown` state.
    pub fn ensure_parachain_status_not_shutdown<T: crate::Config>() -> DispatchResult {
        <security::Pallet<T>>::ensure_parachain_status_not_shutdown()
    }
    /// The current "active" block number as tracked by the security pallet.
    pub fn active_block_number<T: crate::Config>() -> T::BlockNumber {
        <security::Pallet<T>>::active_block_number()
    }
}
#[cfg_attr(test, mockable)]
pub(crate) mod sla {
    //! Wrappers over the `sla` (service-level agreement) pallet.
    use crate::types::{BalanceOf, UnsignedFixedPoint};
    use frame_support::dispatch::DispatchError;
    pub use sla::Action;
    /// Compute how much of a vault's `stake` is slashed, delegating to
    /// `sla::Pallet::calculate_slashed_amount`. `reimburse` and the two
    /// threshold parameters are forwarded unchanged.
    pub fn calculate_slashed_amount<T: crate::Config>(
        vault_id: &T::AccountId,
        stake: BalanceOf<T>,
        reimburse: bool,
        liquidation_threshold: UnsignedFixedPoint<T>,
        premium_redeem_threshold: UnsignedFixedPoint<T>,
    ) -> Result<BalanceOf<T>, DispatchError> {
        <sla::Pallet<T>>::calculate_slashed_amount(
            vault_id,
            stake,
            reimburse,
            liquidation_threshold,
            premium_redeem_threshold,
        )
    }
    /// Record an SLA-affecting `action` for `vault_id`
    /// (delegates to `sla::Pallet::event_update_vault_sla`).
    pub fn event_update_vault_sla<T: crate::Config>(
        vault_id: &T::AccountId,
        action: Action<BalanceOf<T>>,
    ) -> Result<(), DispatchError> {
        <sla::Pallet<T>>::event_update_vault_sla(vault_id, action)
    }
}
| 30.970297 | 112 | 0.652494 |
29b5320e7b693598ecdd8704c5e85baa2398a089 | 2,351 | // c:layout
use super::ManualLayout;
use quick_xml::events::{BytesStart, Event};
use quick_xml::Reader;
use quick_xml::Writer;
use std::io::Cursor;
use writer::driver::*;
// In-memory model of the chart `c:layout` XML element. When `manual_layout`
// is `None` the element serializes as the self-closing tag `<c:layout/>`.
#[derive(Clone, Default, Debug)]
pub struct Layout {
    // Optional `c:manualLayout` child element.
    manual_layout: Option<ManualLayout>,
}
impl Layout {
    /// Returns the optional `c:manualLayout` child.
    pub fn get_manual_layout(&self) -> &Option<ManualLayout> {
        &self.manual_layout
    }

    /// Mutable access to the optional `c:manualLayout` child.
    pub fn get_manual_layout_mut(&mut self) -> &mut Option<ManualLayout> {
        &mut self.manual_layout
    }

    /// Sets the `c:manualLayout` child; returns `&mut self` for call chaining.
    pub fn set_manual_layout(&mut self, value: ManualLayout) -> &mut Layout {
        self.manual_layout = Some(value);
        self
    }

    /// `true` when there is no manual layout, i.e. the element writes out as
    /// the empty tag `<c:layout/>`.
    pub fn is_empty(&self) -> bool {
        self.manual_layout.is_none()
    }

    /// Populates `self` from the XML event stream following a `c:layout`
    /// start tag. `empty_flag` must be `true` when the tag was self-closing,
    /// in which case there is nothing to read.
    ///
    /// # Panics
    /// Panics if EOF is reached before the closing `c:layout` tag, or if the
    /// underlying reader reports an error.
    pub(crate) fn set_attributes<R: std::io::BufRead>(
        &mut self,
        reader: &mut Reader<R>,
        _e: &BytesStart,
        empty_flag: bool,
    ) {
        if empty_flag {
            return;
        }
        let mut buf = Vec::new();
        loop {
            match reader.read_event(&mut buf) {
                Ok(Event::Start(ref e)) => match e.name() {
                    b"c:manualLayout" => {
                        let mut obj = ManualLayout::default();
                        obj.set_attributes(reader, e);
                        // Store the parsed child. (The previous code wrote
                        // `&mut self.set_manual_layout(obj);`, creating and
                        // immediately discarding a useless borrow of the
                        // chaining return value; a plain call is equivalent.)
                        self.set_manual_layout(obj);
                    }
                    _ => (),
                },
                Ok(Event::End(ref e)) => match e.name() {
                    b"c:layout" => return,
                    _ => (),
                },
                Ok(Event::Eof) => panic!("Error not find {} end element", "c:layout"),
                Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
                _ => (),
            }
            buf.clear();
        }
    }

    /// Serializes this element: `<c:layout/>` when empty, otherwise a
    /// `c:layout` element wrapping the serialized `c:manualLayout` child.
    pub(crate) fn write_to(&self, writer: &mut Writer<Cursor<Vec<u8>>>) {
        if self.is_empty() {
            // c:layout (self-closing)
            write_start_tag(writer, "c:layout", vec![], true);
        } else {
            // c:layout
            write_start_tag(writer, "c:layout", vec![], false);
            // c:manualLayout
            if let Some(v) = &self.manual_layout {
                v.write_to(writer);
            }
            write_end_tag(writer, "c:layout");
        }
    }
}
| 27.988095 | 92 | 0.473416 |
d5ad95ace973fbc73af115c6d03a119d19428153 | 143,491 | // ignore-tidy-filelength
//! This crate is responsible for the part of name resolution that doesn't require type checker.
//!
//! Module structure of the crate is built here.
//! Paths in macros, imports, expressions, types, patterns are resolved here.
//! Label and lifetime names are resolved here as well.
//!
//! Type-relative name resolution (methods, fields, associated items) happens in `rustc_typeck`.
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(box_patterns)]
#![feature(drain_filter)]
#![feature(bool_to_option)]
#![feature(crate_visibility_modifier)]
#![feature(let_else)]
#![feature(never_type)]
#![feature(nll)]
#![recursion_limit = "256"]
#![allow(rustdoc::private_intra_doc_links)]
#![cfg_attr(not(bootstrap), allow(rustc::potential_query_instability))]
#[macro_use]
extern crate tracing;
pub use rustc_hir::def::{Namespace, PerNS};
use Determinacy::*;
use rustc_arena::{DroplessArena, TypedArena};
use rustc_ast::node_id::NodeMap;
use rustc_ast::ptr::P;
use rustc_ast::visit::{self, Visitor};
use rustc_ast::{self as ast, NodeId};
use rustc_ast::{Crate, CRATE_NODE_ID};
use rustc_ast::{Expr, ExprKind, LitKind};
use rustc_ast::{ItemKind, ModKind, Path};
use rustc_ast_lowering::ResolverAstLowering;
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_data_structures::ptr_key::PtrKey;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder};
use rustc_expand::base::{DeriveResolutions, SyntaxExtension, SyntaxExtensionKind};
use rustc_hir::def::Namespace::*;
use rustc_hir::def::{self, CtorOf, DefKind, NonMacroAttrKind, PartialRes};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefPathHash, LocalDefId};
use rustc_hir::def_id::{CRATE_DEF_ID, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc_hir::definitions::{DefKey, DefPathData, Definitions};
use rustc_hir::TraitCandidate;
use rustc_index::vec::IndexVec;
use rustc_metadata::creader::{CStore, CrateLoader};
use rustc_middle::metadata::ModChild;
use rustc_middle::middle::privacy::AccessLevels;
use rustc_middle::span_bug;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, DefIdTree, MainDefinition, RegisteredTools, ResolverOutputs};
use rustc_query_system::ich::StableHashingContext;
use rustc_session::cstore::{CrateStore, MetadataLoaderDyn};
use rustc_session::lint;
use rustc_session::lint::{BuiltinLintDiagnostics, LintBuffer};
use rustc_session::Session;
use rustc_span::edition::Edition;
use rustc_span::hygiene::{ExpnId, ExpnKind, LocalExpnId, MacroKind, SyntaxContext, Transparency};
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use smallvec::{smallvec, SmallVec};
use std::cell::{Cell, RefCell};
use std::collections::{BTreeMap, BTreeSet};
use std::ops::ControlFlow;
use std::{cmp, fmt, iter, mem, ptr};
use tracing::debug;
use diagnostics::{extend_span_to_previous_binding, find_span_of_binding_until_next_binding};
use diagnostics::{ImportSuggestion, LabelSuggestion, Suggestion};
use imports::{Import, ImportKind, ImportResolver, NameResolution};
use late::{ConstantItemKind, HasGenericParams, PathSource, Rib, RibKind::*};
use macros::{MacroRulesBinding, MacroRulesScope, MacroRulesScopeRef};
use crate::access_levels::AccessLevelsVisitor;
type Res = def::Res<NodeId>;
mod access_levels;
mod build_reduced_graph;
mod check_unused;
mod def_collector;
mod diagnostics;
mod imports;
mod late;
mod macros;
// Two-state flag threaded through resolution routines; the exact "weak"
// lookup semantics are defined at the use sites elsewhere in this crate
// (not visible in this section).
enum Weak {
    Yes,
    No,
}
/// Whether a resolution result is final (`Determined`) or not yet known
/// (`Undetermined`).
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Determinacy {
    Determined,
    Undetermined,
}

impl Determinacy {
    /// Maps `true` to `Determined` and `false` to `Undetermined`.
    fn determined(determined: bool) -> Determinacy {
        match determined {
            true => Determinacy::Determined,
            false => Determinacy::Undetermined,
        }
    }
}
/// A specific scope in which a name can be looked up.
/// This enum is currently used only for early resolution (imports and macros),
/// but not for late resolution yet.
#[derive(Clone, Copy)]
enum Scope<'a> {
    /// Helper attributes of the derive introduced by this expansion.
    DeriveHelpers(LocalExpnId),
    /// Compatibility lookup mode for derive helper attributes.
    DeriveHelpersCompat,
    /// A `macro_rules!` scope.
    MacroRules(MacroRulesScopeRef<'a>),
    CrateRoot,
    // The node ID is for reporting the `PROC_MACRO_DERIVE_RESOLUTION_FALLBACK`
    // lint if it should be reported.
    Module(Module<'a>, Option<NodeId>),
    RegisteredAttrs,
    MacroUsePrelude,
    BuiltinAttrs,
    ExternPrelude,
    ToolPrelude,
    StdLibPrelude,
    BuiltinTypes,
}
/// Names from different contexts may want to visit different subsets of all specific scopes
/// with different restrictions when looking up the resolution.
/// This enum is currently used only for early resolution (imports and macros),
/// but not for late resolution yet.
/// Each variant selects which of the [`Scope`]s the scope visitor walks.
#[derive(Clone, Copy)]
enum ScopeSet<'a> {
    /// All scopes with the given namespace.
    All(Namespace, /*is_import*/ bool),
    /// Crate root, then extern prelude (used for mixed 2015-2018 mode in macros).
    AbsolutePath(Namespace),
    /// All scopes with macro namespace and the given macro kind restriction.
    Macro(MacroKind),
    /// All scopes with the given namespace, used for partially performing late resolution.
    /// The node id enables lints and is used for reporting them.
    Late(Namespace, Module<'a>, Option<NodeId>),
}
/// Everything you need to know about a name's location to resolve it.
/// Serves as a starting point for the scope visitor.
/// This struct is currently used only for early resolution (imports and macros),
/// but not for late resolution yet.
#[derive(Clone, Copy, Debug)]
pub struct ParentScope<'a> {
    /// Innermost enclosing module.
    module: Module<'a>,
    /// Expansion in which the name appears.
    expansion: LocalExpnId,
    /// Innermost enclosing `macro_rules!` scope.
    macro_rules: MacroRulesScopeRef<'a>,
    /// Derives applied at this position (used for derive helper resolution).
    derives: &'a [ast::Path],
}
impl<'a> ParentScope<'a> {
    /// Creates a parent scope with the passed argument used as the module scope component,
    /// and other scope components set to default empty values.
    pub fn module(module: Module<'a>, resolver: &Resolver<'a>) -> ParentScope<'a> {
        ParentScope {
            module,
            expansion: LocalExpnId::ROOT,
            macro_rules: resolver.arenas.alloc_macro_rules_scope(MacroRulesScope::Empty),
            derives: &[],
        }
    }
}
// Where an `impl Trait` occurs: `Existential` (e.g. return position) vs.
// `Universal` (argument position; carries the parent item's `LocalDefId`).
#[derive(Copy, Debug, Clone)]
enum ImplTraitContext {
    Existential,
    Universal(LocalDefId),
}
// Describes a binding-related resolution error (used e.g. for variables not
// bound in all patterns; see `ResolutionError::VariableNotBoundInPattern`).
// NOTE: equality and ordering compare only `name` — the span sets and
// `could_be_path` are deliberately ignored, so errors dedup/sort by name.
#[derive(Eq)]
struct BindingError {
    name: Symbol,
    origin: BTreeSet<Span>,
    target: BTreeSet<Span>,
    could_be_path: bool,
}
impl PartialOrd for BindingError {
    // Delegates to the total order defined by `Ord` (name-only).
    fn partial_cmp(&self, other: &BindingError) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for BindingError {
    // Name-only equality; spans are intentionally not compared.
    fn eq(&self, other: &BindingError) -> bool {
        self.name == other.name
    }
}
impl Ord for BindingError {
    // Name-only ordering, consistent with `PartialEq` above.
    fn cmp(&self, other: &BindingError) -> cmp::Ordering {
        self.name.cmp(&other.name)
    }
}
/// Errors produced during name resolution; each variant corresponds to one
/// diagnostic (most carry their `E0xxx` error code in the variant doc).
enum ResolutionError<'a> {
    /// Error E0401: can't use type or const parameters from outer function.
    GenericParamsFromOuterFunction(Res, HasGenericParams),
    /// Error E0403: the name is already used for a type or const parameter in this generic
    /// parameter list.
    NameAlreadyUsedInParameterList(Symbol, Span),
    /// Error E0407: method is not a member of trait.
    MethodNotMemberOfTrait(Ident, &'a str, Option<Symbol>),
    /// Error E0437: type is not a member of trait.
    TypeNotMemberOfTrait(Ident, &'a str, Option<Symbol>),
    /// Error E0438: const is not a member of trait.
    ConstNotMemberOfTrait(Ident, &'a str, Option<Symbol>),
    /// Error E0408: variable `{}` is not bound in all patterns.
    VariableNotBoundInPattern(&'a BindingError),
    /// Error E0409: variable `{}` is bound in inconsistent ways within the same match arm.
    VariableBoundWithDifferentMode(Symbol, Span),
    /// Error E0415: identifier is bound more than once in this parameter list.
    IdentifierBoundMoreThanOnceInParameterList(Symbol),
    /// Error E0416: identifier is bound more than once in the same pattern.
    IdentifierBoundMoreThanOnceInSamePattern(Symbol),
    /// Error E0426: use of undeclared label.
    UndeclaredLabel { name: Symbol, suggestion: Option<LabelSuggestion> },
    /// Error E0429: `self` imports are only allowed within a `{ }` list.
    SelfImportsOnlyAllowedWithin { root: bool, span_with_rename: Span },
    /// Error E0430: `self` import can only appear once in the list.
    SelfImportCanOnlyAppearOnceInTheList,
    /// Error E0431: `self` import can only appear in an import list with a non-empty prefix.
    SelfImportOnlyInImportListWithNonEmptyPrefix,
    /// Error E0433: failed to resolve.
    FailedToResolve { label: String, suggestion: Option<Suggestion> },
    /// Error E0434: can't capture dynamic environment in a fn item.
    CannotCaptureDynamicEnvironmentInFnItem,
    /// Error E0435: attempt to use a non-constant value in a constant.
    AttemptToUseNonConstantValueInConstant(
        Ident,
        /* suggestion */ &'static str,
        /* current */ &'static str,
    ),
    /// Error E0530: `X` bindings cannot shadow `Y`s.
    BindingShadowsSomethingUnacceptable {
        shadowing_binding_descr: &'static str,
        name: Symbol,
        participle: &'static str,
        article: &'static str,
        shadowed_binding_descr: &'static str,
        shadowed_binding_span: Span,
    },
    /// Error E0128: generic parameters with a default cannot use forward-declared identifiers.
    ForwardDeclaredGenericParam,
    /// ERROR E0770: the type of const parameters must not depend on other generic parameters.
    ParamInTyOfConstParam(Symbol),
    /// generic parameters must not be used inside const evaluations.
    ///
    /// This error is only emitted when using `min_const_generics`.
    ParamInNonTrivialAnonConst { name: Symbol, is_type: bool },
    /// Error E0735: generic parameters with a default cannot use `Self`
    SelfInGenericParamDefault,
    /// Error E0767: use of unreachable label
    UnreachableLabel { name: Symbol, definition_span: Span, suggestion: Option<LabelSuggestion> },
    /// Error E0323, E0324, E0325: mismatch between trait item and impl item.
    TraitImplMismatch {
        name: Symbol,
        kind: &'static str,
        trait_path: String,
        trait_item_span: Span,
        code: rustc_errors::DiagnosticId,
    },
}
// Errors specific to resolving the path of a visibility (e.g. `pub(in path)`);
// each variant carries the span of the offending visibility.
enum VisResolutionError<'a> {
    Relative2018(Span, &'a ast::Path),
    AncestorOnly(Span),
    FailedToResolve(Span, String, Option<Suggestion>),
    ExpectedFound(Span, String, Res),
    Indeterminate(Span),
    ModuleOnly(Span),
}
/// A minimal representation of a path segment. We use this in resolve because we synthesize 'path
/// segments' which don't have the rest of an AST or HIR `PathSegment`.
#[derive(Clone, Copy, Debug)]
pub struct Segment {
    ident: Ident,
    /// `None` for synthesized segments that have no AST node.
    id: Option<NodeId>,
    /// Signals whether this `PathSegment` has generic arguments. Used to avoid providing
    /// nonsensical suggestions.
    has_generic_args: bool,
}
impl Segment {
    /// Converts every segment of an AST path via the `From` impl below.
    fn from_path(path: &Path) -> Vec<Segment> {
        path.segments.iter().map(|s| s.into()).collect()
    }
    /// Builds a synthesized segment: no node ID, no generic arguments.
    fn from_ident(ident: Ident) -> Segment {
        Segment { ident, id: None, has_generic_args: false }
    }
    /// Joins segment names into a display string
    /// (delegates to the file-level `names_to_string`).
    fn names_to_string(segments: &[Segment]) -> String {
        names_to_string(&segments.iter().map(|seg| seg.ident.name).collect::<Vec<_>>())
    }
}
impl<'a> From<&'a ast::PathSegment> for Segment {
    fn from(seg: &'a ast::PathSegment) -> Segment {
        Segment { ident: seg.ident, id: Some(seg.id), has_generic_args: seg.args.is_some() }
    }
}
/// AST visitor that finds a suitable span in a target module at which a
/// suggested `use` statement can be inserted by diagnostics.
struct UsePlacementFinder {
    /// Module whose item list we want to insert into.
    target_module: NodeId,
    /// Insertion point found so far (start-of-line span), if any.
    span: Option<Span>,
    /// Whether the insertion point precedes an existing `use` item.
    found_use: bool,
}
impl UsePlacementFinder {
    /// Walks `krate` and returns `(insertion_span, found_use)` for
    /// `target_module`. The crate root is checked first, then nested modules
    /// via the `Visitor` impl below.
    fn check(krate: &Crate, target_module: NodeId) -> (Option<Span>, bool) {
        let mut finder = UsePlacementFinder { target_module, span: None, found_use: false };
        if let ControlFlow::Continue(..) = finder.check_mod(&krate.items, CRATE_NODE_ID) {
            visit::walk_crate(&mut finder, krate);
        }
        (finder.span, finder.found_use)
    }
    /// Examines one module's items; `Break` means the search is finished
    /// (either a definitive spot was found or one was already recorded).
    fn check_mod(&mut self, items: &[P<ast::Item>], node_id: NodeId) -> ControlFlow<()> {
        if self.span.is_some() {
            return ControlFlow::Break(());
        }
        if node_id != self.target_module {
            return ControlFlow::Continue(());
        }
        // find a use statement
        for item in items {
            match item.kind {
                ItemKind::Use(..) => {
                    // don't suggest placing a use before the prelude
                    // import or other generated ones
                    if !item.span.from_expansion() {
                        self.span = Some(item.span.shrink_to_lo());
                        self.found_use = true;
                        return ControlFlow::Break(());
                    }
                }
                // don't place use before extern crate
                ItemKind::ExternCrate(_) => {}
                // but place them before the first other item
                _ => {
                    if self.span.map_or(true, |span| item.span < span)
                        && !item.span.from_expansion()
                    {
                        self.span = Some(item.span.shrink_to_lo());
                        // don't insert between attributes and an item
                        // find the first attribute on the item
                        // FIXME: This is broken for active attributes.
                        for attr in &item.attrs {
                            if !attr.span.is_dummy()
                                && self.span.map_or(true, |span| attr.span < span)
                            {
                                self.span = Some(attr.span.shrink_to_lo());
                            }
                        }
                    }
                }
            }
        }
        ControlFlow::Continue(())
    }
}
impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
    // Recurses into inline (`Loaded`) modules, checking each one; stops
    // descending once `check_mod` reports `Break`.
    fn visit_item(&mut self, item: &'tcx ast::Item) {
        if let ItemKind::Mod(_, ModKind::Loaded(items, ..)) = &item.kind {
            if let ControlFlow::Break(..) = self.check_mod(items, item.id) {
                return;
            }
        }
        visit::walk_item(self, item);
    }
}
/// An intermediate resolution result.
///
/// This refers to the thing referred by a name. The difference between `Res` and `Item` is that
/// items are visible in their whole block, while `Res`es only from the place they are defined
/// forward.
#[derive(Debug)]
enum LexicalScopeBinding<'a> {
    Item(&'a NameBinding<'a>),
    Res(Res),
}
impl<'a> LexicalScopeBinding<'a> {
    /// Collapses either variant into its underlying `Res`.
    fn res(self) -> Res {
        match self {
            LexicalScopeBinding::Item(binding) => binding.res(),
            LexicalScopeBinding::Res(res) => res,
        }
    }
}
#[derive(Copy, Clone, Debug)]
enum ModuleOrUniformRoot<'a> {
    /// Regular module.
    Module(Module<'a>),
    /// Virtual module that denotes resolution in crate root with fallback to extern prelude.
    CrateRootAndExternPrelude,
    /// Virtual module that denotes resolution in extern prelude.
    /// Used for paths starting with `::` on 2018 edition.
    ExternPrelude,
    /// Virtual module that denotes resolution in current scope.
    /// Used only for resolving single-segment imports. The reason it exists is that import paths
    /// are always split into two parts, the first of which should be some kind of module.
    CurrentScope,
}
impl ModuleOrUniformRoot<'_> {
    /// Whether both values denote the same thing: identical (by pointer
    /// identity, since modules are arena-interned) for `Module`, or the same
    /// virtual-root variant otherwise.
    fn same_def(lhs: Self, rhs: Self) -> bool {
        match (lhs, rhs) {
            (ModuleOrUniformRoot::Module(lhs), ModuleOrUniformRoot::Module(rhs)) => {
                ptr::eq(lhs, rhs)
            }
            (
                ModuleOrUniformRoot::CrateRootAndExternPrelude,
                ModuleOrUniformRoot::CrateRootAndExternPrelude,
            )
            | (ModuleOrUniformRoot::ExternPrelude, ModuleOrUniformRoot::ExternPrelude)
            | (ModuleOrUniformRoot::CurrentScope, ModuleOrUniformRoot::CurrentScope) => true,
            _ => false,
        }
    }
}
/// Outcome of resolving a (possibly multi-segment) path.
#[derive(Clone, Debug)]
enum PathResult<'a> {
    /// The path resolved to some kind of module or virtual root.
    Module(ModuleOrUniformRoot<'a>),
    /// The path resolved to a non-module item.
    NonModule(PartialRes),
    /// Resolution cannot be decided yet (e.g. pending macro expansion).
    Indeterminate,
    /// Resolution failed; carries diagnostic details.
    Failed {
        span: Span,
        label: String,
        suggestion: Option<Suggestion>,
        /// Whether the failure occurred on the path's final segment.
        is_error_from_last_segment: bool,
    },
}
#[derive(Debug)]
enum ModuleKind {
    /// An anonymous module; e.g., just a block.
    ///
    /// ```
    /// fn main() {
    ///     fn f() {} // (1)
    ///     { // This is an anonymous module
    ///         f(); // This resolves to (2) as we are inside the block.
    ///         fn f() {} // (2)
    ///     }
    ///     f(); // Resolves to (1)
    /// }
    /// ```
    Block(NodeId),
    /// Any module with a name.
    ///
    /// This could be:
    ///
    /// * A normal module – either `mod from_file;` or `mod from_block { }` –
    ///   or the crate root (which is conceptually a top-level module).
    ///   Note that the crate root's [name][Self::name] will be [`kw::Empty`].
    /// * A trait or an enum (it implicitly contains associated types, methods and variant
    ///   constructors).
    Def(DefKind, DefId, Symbol),
}
impl ModuleKind {
    /// Get name of the module. Returns `None` for anonymous block modules.
    pub fn name(&self) -> Option<Symbol> {
        match self {
            ModuleKind::Block(..) => None,
            ModuleKind::Def(.., name) => Some(*name),
        }
    }
}
/// A key that identifies a binding in a given `Module`.
///
/// Multiple bindings in the same module can have the same key (in a valid
/// program) if all but one of them come from glob imports.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct BindingKey {
    /// The identifier for the binding, always the `normalize_to_macros_2_0` version of the
    /// identifier.
    ident: Ident,
    ns: Namespace,
    /// 0 if ident is not `_`, otherwise a value that's unique to the specific
    /// `_` in the expanded AST that introduced this binding.
    disambiguator: u32,
}
/// Per-module map from binding keys to their (possibly in-progress) resolutions.
type Resolutions<'a> = RefCell<FxIndexMap<BindingKey, &'a RefCell<NameResolution<'a>>>>;
/// One node in the tree of modules.
///
/// Note that a "module" in resolve is broader than a `mod` that you declare in Rust code. It may be one of these:
///
/// * `mod`
/// * crate root (aka, top-level anonymous module)
/// * `enum`
/// * `trait`
/// * curly-braced block with statements
///
/// You can use [`ModuleData::kind`] to determine the kind of module this is.
pub struct ModuleData<'a> {
    /// The direct parent module (it may not be a `mod`, however).
    parent: Option<Module<'a>>,
    /// What kind of module this is, because this may not be a `mod`.
    kind: ModuleKind,
    /// Mapping between names and their (possibly in-progress) resolutions in this module.
    /// Resolutions in modules from other crates are not populated until accessed.
    lazy_resolutions: Resolutions<'a>,
    /// True if this is a module from other crate that needs to be populated on access.
    populate_on_access: Cell<bool>,
    /// Macro invocations that can expand into items in this module.
    unexpanded_invocations: RefCell<FxHashSet<LocalExpnId>>,
    /// Whether `#[no_implicit_prelude]` is active.
    no_implicit_prelude: bool,
    /// Imports that glob-import from this module (maintained by import resolution).
    glob_importers: RefCell<Vec<&'a Import<'a>>>,
    /// Glob imports declared inside this module.
    globs: RefCell<Vec<&'a Import<'a>>>,
    /// Used to memoize the traits in this module for faster searches through all traits in scope.
    traits: RefCell<Option<Box<[(Ident, &'a NameBinding<'a>)]>>>,
    /// Span of the module itself. Used for error reporting.
    span: Span,
    /// Expansion this module was introduced by.
    expansion: ExpnId,
}
/// Arena-interned reference to [`ModuleData`]; compared by pointer identity.
type Module<'a> = &'a ModuleData<'a>;
impl<'a> ModuleData<'a> {
    /// Creates a fresh module. Modules for `Def`s from other crates are
    /// flagged `populate_on_access` so their resolutions load lazily.
    fn new(
        parent: Option<Module<'a>>,
        kind: ModuleKind,
        expansion: ExpnId,
        span: Span,
        no_implicit_prelude: bool,
    ) -> Self {
        let is_foreign = match kind {
            ModuleKind::Def(_, def_id, _) => !def_id.is_local(),
            ModuleKind::Block(_) => false,
        };
        ModuleData {
            parent,
            kind,
            lazy_resolutions: Default::default(),
            populate_on_access: Cell::new(is_foreign),
            unexpanded_invocations: Default::default(),
            no_implicit_prelude,
            glob_importers: RefCell::new(Vec::new()),
            globs: RefCell::new(Vec::new()),
            traits: RefCell::new(None),
            span,
            expansion,
        }
    }
    /// Invokes `f` for every resolved (non-pending) binding in this module.
    fn for_each_child<R, F>(&'a self, resolver: &mut R, mut f: F)
    where
        R: AsMut<Resolver<'a>>,
        F: FnMut(&mut R, Ident, Namespace, &'a NameBinding<'a>),
    {
        for (key, name_resolution) in resolver.as_mut().resolutions(self).borrow().iter() {
            if let Some(binding) = name_resolution.borrow().binding {
                f(resolver, key.ident, key.ns, binding);
            }
        }
    }
    /// This modifies `self` in place. The traits will be stored in `self.traits`.
    fn ensure_traits<R>(&'a self, resolver: &mut R)
    where
        R: AsMut<Resolver<'a>>,
    {
        let mut traits = self.traits.borrow_mut();
        if traits.is_none() {
            // First access: collect all trait/trait-alias bindings in the
            // type namespace and memoize them.
            let mut collected_traits = Vec::new();
            self.for_each_child(resolver, |_, name, ns, binding| {
                if ns != TypeNS {
                    return;
                }
                if let Res::Def(DefKind::Trait | DefKind::TraitAlias, _) = binding.res() {
                    collected_traits.push((name, binding))
                }
            });
            *traits = Some(collected_traits.into_boxed_slice());
        }
    }
    /// The module's `Res`, or `None` for anonymous block modules.
    fn res(&self) -> Option<Res> {
        match self.kind {
            ModuleKind::Def(kind, def_id, _) => Some(Res::Def(kind, def_id)),
            _ => None,
        }
    }
    // Public for rustdoc.
    pub fn def_id(&self) -> DefId {
        self.opt_def_id().expect("`ModuleData::def_id` is called on a block module")
    }
    /// The module's `DefId`, or `None` for anonymous block modules.
    fn opt_def_id(&self) -> Option<DefId> {
        match self.kind {
            ModuleKind::Def(_, def_id, _) => Some(def_id),
            _ => None,
        }
    }
    // `self` resolves to the first module ancestor that `is_normal`.
    fn is_normal(&self) -> bool {
        matches!(self.kind, ModuleKind::Def(DefKind::Mod, _, _))
    }
    /// Whether this "module" is actually a trait definition.
    fn is_trait(&self) -> bool {
        matches!(self.kind, ModuleKind::Def(DefKind::Trait, _, _))
    }
    /// The nearest module in which items can be declared: enums and traits
    /// delegate to their parent, everything else is itself.
    fn nearest_item_scope(&'a self) -> Module<'a> {
        match self.kind {
            ModuleKind::Def(DefKind::Enum | DefKind::Trait, ..) => {
                self.parent.expect("enum or trait module without a parent")
            }
            _ => self,
        }
    }
    /// The [`DefId`] of the nearest `mod` item ancestor (which may be this module).
    /// This may be the crate root.
    fn nearest_parent_mod(&self) -> DefId {
        match self.kind {
            ModuleKind::Def(DefKind::Mod, def_id, _) => def_id,
            _ => self.parent.expect("non-root module without parent").nearest_parent_mod(),
        }
    }
    /// Whether `self` is `other` or one of its ancestors (pointer identity
    /// walk up the `parent` chain).
    fn is_ancestor_of(&self, mut other: &Self) -> bool {
        while !ptr::eq(self, other) {
            if let Some(parent) = other.parent {
                other = parent;
            } else {
                return false;
            }
        }
        true
    }
}
impl<'a> fmt::Debug for ModuleData<'a> {
    // Debug-prints only the module's `Res` (avoids recursing into the
    // module tree).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self.res())
    }
}
/// Records a possibly-private value, type, or module definition.
#[derive(Clone, Debug)]
pub struct NameBinding<'a> {
    kind: NameBindingKind<'a>,
    /// When resolution of this name was ambiguous: the competing binding and
    /// the kind of ambiguity.
    ambiguity: Option<(&'a NameBinding<'a>, AmbiguityKind)>,
    /// Expansion that introduced this binding.
    expansion: LocalExpnId,
    span: Span,
    vis: ty::Visibility,
}
/// Conversion into an arena-allocated [`NameBinding`].
pub trait ToNameBinding<'a> {
    fn to_name_binding(self, arenas: &'a ResolverArenas<'a>) -> &'a NameBinding<'a>;
}
impl<'a> ToNameBinding<'a> for &'a NameBinding<'a> {
    // Already allocated — identity conversion.
    fn to_name_binding(self, _: &'a ResolverArenas<'a>) -> &'a NameBinding<'a> {
        self
    }
}
/// What a [`NameBinding`] actually binds to.
#[derive(Clone, Debug)]
enum NameBindingKind<'a> {
    Res(Res, /* is_macro_export */ bool),
    Module(Module<'a>),
    Import { binding: &'a NameBinding<'a>, import: &'a Import<'a>, used: Cell<bool> },
}
impl<'a> NameBindingKind<'a> {
    /// Is this a name binding of an import?
    fn is_import(&self) -> bool {
        matches!(*self, NameBindingKind::Import { .. })
    }
}
/// A use of a private binding; `dedup_span` is used to avoid reporting the
/// same location twice.
struct PrivacyError<'a> {
    ident: Ident,
    binding: &'a NameBinding<'a>,
    dedup_span: Span,
}
/// A pending diagnostic augmented with `use` suggestions.
struct UseError<'a> {
    err: DiagnosticBuilder<'a>,
    /// Candidates which user could `use` to access the missing type.
    candidates: Vec<ImportSuggestion>,
    /// The `DefId` of the module to place the use-statements in.
    def_id: DefId,
    /// Whether the diagnostic should say "instead" (as in `consider importing ... instead`).
    instead: bool,
    /// Extra free-form suggestion.
    suggestion: Option<(Span, &'static str, String, Applicability)>,
}
/// The different ways a name resolution can be ambiguous; `descr` gives the
/// human-readable description used in diagnostics.
#[derive(Clone, Copy, PartialEq, Debug)]
enum AmbiguityKind {
    Import,
    BuiltinAttr,
    DeriveHelper,
    MacroRulesVsModularized,
    GlobVsOuter,
    GlobVsGlob,
    GlobVsExpanded,
    MoreExpandedVsOuter,
}
impl AmbiguityKind {
    /// Diagnostic description for this ambiguity kind.
    fn descr(self) -> &'static str {
        match self {
            AmbiguityKind::Import => "multiple potential import sources",
            AmbiguityKind::BuiltinAttr => "a name conflict with a builtin attribute",
            AmbiguityKind::DeriveHelper => "a name conflict with a derive helper attribute",
            AmbiguityKind::MacroRulesVsModularized => {
                "a conflict between a `macro_rules` name and a non-`macro_rules` name from another module"
            }
            AmbiguityKind::GlobVsOuter => {
                "a conflict between a name from a glob import and an outer scope during import or macro resolution"
            }
            AmbiguityKind::GlobVsGlob => "multiple glob imports of a name in the same module",
            AmbiguityKind::GlobVsExpanded => {
                "a conflict between a name from a glob import and a macro-expanded name in the same module during import or macro resolution"
            }
            AmbiguityKind::MoreExpandedVsOuter => {
                "a conflict between a macro-expanded name and a less macro-expanded name from outer scope during import or macro resolution"
            }
        }
    }
}
/// Miscellaneous bits of metadata for better ambiguity error reporting.
#[derive(Clone, Copy, PartialEq)]
enum AmbiguityErrorMisc {
    SuggestCrate,
    SuggestSelf,
    FromPrelude,
    None,
}
/// A recorded ambiguity between two candidate bindings (`b1`/`b2`) for
/// `ident`, with per-candidate reporting metadata (`misc1`/`misc2`).
struct AmbiguityError<'a> {
    kind: AmbiguityKind,
    ident: Ident,
    b1: &'a NameBinding<'a>,
    b2: &'a NameBinding<'a>,
    misc1: AmbiguityErrorMisc,
    misc2: AmbiguityErrorMisc,
}
impl<'a> NameBinding<'a> {
    /// The module this binding refers to, following import chains; `None`
    /// for non-module bindings.
    fn module(&self) -> Option<Module<'a>> {
        match self.kind {
            NameBindingKind::Module(module) => Some(module),
            NameBindingKind::Import { binding, .. } => binding.module(),
            _ => None,
        }
    }
    /// The underlying `Res`, following import chains.
    fn res(&self) -> Res {
        match self.kind {
            NameBindingKind::Res(res, _) => res,
            NameBindingKind::Module(module) => module.res().unwrap(),
            NameBindingKind::Import { binding, .. } => binding.res(),
        }
    }
    /// Whether this binding (or anything it imports, transitively) is ambiguous.
    fn is_ambiguity(&self) -> bool {
        self.ambiguity.is_some()
            || match self.kind {
                NameBindingKind::Import { binding, .. } => binding.is_ambiguity(),
                _ => false,
            }
    }
    /// Whether this binding is an enum variant (or variant constructor),
    /// possibly reached through a chain of imports.
    fn is_possibly_imported_variant(&self) -> bool {
        match self.kind {
            NameBindingKind::Import { binding, .. } => binding.is_possibly_imported_variant(),
            NameBindingKind::Res(
                Res::Def(DefKind::Variant | DefKind::Ctor(CtorOf::Variant, ..), _),
                _,
            ) => true,
            NameBindingKind::Res(..) | NameBindingKind::Module(..) => false,
        }
    }
    /// Whether this binding comes from an `extern crate` item or is the root
    /// module of an external crate.
    fn is_extern_crate(&self) -> bool {
        match self.kind {
            NameBindingKind::Import {
                import: &Import { kind: ImportKind::ExternCrate { .. }, .. },
                ..
            } => true,
            NameBindingKind::Module(&ModuleData {
                kind: ModuleKind::Def(DefKind::Mod, def_id, _),
                ..
            }) => def_id.index == CRATE_DEF_INDEX,
            _ => false,
        }
    }
    /// Whether this binding was introduced by an import.
    fn is_import(&self) -> bool {
        matches!(self.kind, NameBindingKind::Import { .. })
    }
    /// Whether this binding was introduced by a glob import.
    fn is_glob_import(&self) -> bool {
        match self.kind {
            NameBindingKind::Import { import, .. } => import.is_glob(),
            _ => false,
        }
    }
    /// Whether this binding can itself be the target of a `use`
    /// (associated items cannot be imported).
    fn is_importable(&self) -> bool {
        !matches!(
            self.res(),
            Res::Def(DefKind::AssocConst | DefKind::AssocFn | DefKind::AssocTy, _)
        )
    }
    /// Whether this binding is a macro definition (not via import).
    fn is_macro_def(&self) -> bool {
        matches!(self.kind, NameBindingKind::Res(Res::Def(DefKind::Macro(..), _), _))
    }
    /// The macro kind of the underlying `Res`, if it is a macro.
    fn macro_kind(&self) -> Option<MacroKind> {
        self.res().macro_kind()
    }
    // Suppose that we resolved macro invocation with `invoc_parent_expansion` to binding `binding`
    // at some expansion round `max(invoc, binding)` when they both emerged from macros.
    // Then this function returns `true` if `self` may emerge from a macro *after* that
    // in some later round and screw up our previously found resolution.
    // See more detailed explanation in
    // https://github.com/rust-lang/rust/pull/53778#issuecomment-419224049
    fn may_appear_after(
        &self,
        invoc_parent_expansion: LocalExpnId,
        binding: &NameBinding<'_>,
    ) -> bool {
        // self > max(invoc, binding) => !(self <= invoc || self <= binding)
        // Expansions are partially ordered, so "may appear after" is an inversion of
        // "certainly appears before or simultaneously" and includes unordered cases.
        let self_parent_expansion = self.expansion;
        let other_parent_expansion = binding.expansion;
        let certainly_before_other_or_simultaneously =
            other_parent_expansion.is_descendant_of(self_parent_expansion);
        let certainly_before_invoc_or_simultaneously =
            invoc_parent_expansion.is_descendant_of(self_parent_expansion);
        !(certainly_before_other_or_simultaneously || certainly_before_invoc_or_simultaneously)
    }
}
/// An entry in the extern prelude: the binding introduced by an
/// `extern crate` item (if any) and whether it came from such an item.
#[derive(Debug, Default, Clone)]
pub struct ExternPreludeEntry<'a> {
    extern_crate_item: Option<&'a NameBinding<'a>>,
    pub introduced_by_item: bool,
}
/// Used for better errors for E0773
enum BuiltinMacroState {
    NotYetSeen(SyntaxExtensionKind),
    AlreadySeen(Span),
}
/// Cached state for resolving a `#[derive(...)]` invocation: the resolved
/// derives, their helper attributes (with indices), and whether `Copy` is
/// among the derives.
struct DeriveData {
    resolutions: DeriveResolutions,
    helper_attrs: Vec<(usize, Ident)>,
    has_derive_copy: bool,
}
/// The main resolver class.
///
/// This is the visitor that walks the whole crate.
pub struct Resolver<'a> {
    session: &'a Session,
    /// `DefId`/`DefPath` table for everything defined in the local crate.
    definitions: Definitions,
    /// Module for the crate root.
    graph_root: Module<'a>,
    /// The standard-library prelude module, once known (see `Scope::StdLibPrelude`).
    prelude: Option<Module<'a>>,
    /// Crates nameable at the root: `--extern` entries plus implicit `core`/`std`.
    extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'a>>,
    /// N.B., this is used only for better diagnostics, not name resolution itself.
    has_self: FxHashSet<DefId>,
    /// Names of fields of an item `DefId` accessible with dot syntax.
    /// Used for hints during error reporting.
    field_names: FxHashMap<DefId, Vec<Spanned<Symbol>>>,
    /// All imports known to succeed or fail.
    determined_imports: Vec<&'a Import<'a>>,
    /// All non-determined imports.
    indeterminate_imports: Vec<&'a Import<'a>>,
    /// FIXME: Refactor things so that these fields are passed through arguments and not resolver.
    /// We are resolving a last import segment during import validation.
    last_import_segment: bool,
    /// This binding should be ignored during in-module resolution, so that we don't get
    /// "self-confirming" import resolutions during import validation.
    unusable_binding: Option<&'a NameBinding<'a>>,
    // Spans for local variables found during pattern resolution.
    // Used for suggestions during error reporting.
    pat_span_map: NodeMap<Span>,
    /// Resolutions for nodes that have a single resolution.
    partial_res_map: NodeMap<PartialRes>,
    /// Resolutions for import nodes, which have multiple resolutions in different namespaces.
    import_res_map: NodeMap<PerNS<Option<Res>>>,
    /// Resolutions for labels (node IDs of their corresponding blocks or loops).
    label_res_map: NodeMap<NodeId>,
    /// `CrateNum` resolutions of `extern crate` items.
    extern_crate_map: FxHashMap<LocalDefId, CrateNum>,
    /// Children re-exported from each local item.
    reexport_map: FxHashMap<LocalDefId, Vec<ModChild>>,
    /// Trait candidates in scope per node; drained during AST lowering
    /// via `take_trait_map`.
    trait_map: NodeMap<Vec<TraitCandidate>>,
    /// A map from nodes to anonymous modules.
    /// Anonymous modules are pseudo-modules that are implicitly created around items
    /// contained within blocks.
    ///
    /// For example, if we have this:
    ///
    /// fn f() {
    ///     fn g() {
    ///         ...
    ///     }
    /// }
    ///
    /// There will be an anonymous module created around `g` with the ID of the
    /// entry block for `f`.
    block_map: NodeMap<Module<'a>>,
    /// A fake module that contains no definition and no prelude. Used so that
    /// some AST passes can generate identifiers that only resolve to local or
    /// language items.
    empty_module: Module<'a>,
    /// All modules with a `DefId`, keyed by that id.
    module_map: FxHashMap<DefId, Module<'a>>,
    /// The module each binding was defined in (keyed by binding identity).
    binding_parent_modules: FxHashMap<PtrKey<'a, NameBinding<'a>>, Module<'a>>,
    /// Counter making each `_` binding key unique (see `new_key`).
    underscore_disambiguator: u32,
    /// Maps glob imports to the names of items actually imported.
    glob_map: FxHashMap<LocalDefId, FxHashSet<Symbol>>,
    /// Visibilities in "lowered" form, for all entities that have them.
    visibilities: FxHashMap<LocalDefId, ty::Visibility>,
    /// Imports recorded as used (see `record_use`).
    used_imports: FxHashSet<NodeId>,
    maybe_unused_trait_imports: FxHashSet<LocalDefId>,
    maybe_unused_extern_crates: Vec<(LocalDefId, Span)>,
    /// Privacy errors are delayed until the end in order to deduplicate them.
    privacy_errors: Vec<PrivacyError<'a>>,
    /// Ambiguity errors are delayed for deduplication.
    ambiguity_errors: Vec<AmbiguityError<'a>>,
    /// `use` injections are delayed for better placement and deduplication.
    use_injections: Vec<UseError<'a>>,
    /// Crate-local macro expanded `macro_export` referred to by a module-relative path.
    macro_expanded_macro_export_errors: BTreeSet<(Span, Span)>,
    /// Backing storage for everything resolution allocates (modules, bindings, ...).
    arenas: &'a ResolverArenas<'a>,
    /// Stand-in binding (`Res::Err`) used when resolution fails.
    dummy_binding: &'a NameBinding<'a>,
    crate_loader: CrateLoader<'a>,
    macro_names: FxHashSet<Ident>,
    /// Compiler-builtin macros, tracked for E0773 (see `BuiltinMacroState`).
    builtin_macros: FxHashMap<Symbol, BuiltinMacroState>,
    /// Attributes registered via `#![register_attr]`-style crate attributes.
    registered_attrs: FxHashSet<Ident>,
    registered_tools: RegisteredTools,
    /// Macros brought into scope by `#[macro_use]` on extern crates.
    macro_use_prelude: FxHashMap<Symbol, &'a NameBinding<'a>>,
    all_macros: FxHashMap<Symbol, Res>,
    macro_map: FxHashMap<DefId, Lrc<SyntaxExtension>>,
    /// Placeholder extensions returned by `dummy_ext` for failed resolutions.
    dummy_ext_bang: Lrc<SyntaxExtension>,
    dummy_ext_derive: Lrc<SyntaxExtension>,
    non_macro_attr: Lrc<SyntaxExtension>,
    local_macro_def_scopes: FxHashMap<LocalDefId, Module<'a>>,
    ast_transform_scopes: FxHashMap<LocalExpnId, Module<'a>>,
    unused_macros: FxHashMap<LocalDefId, (NodeId, Ident)>,
    proc_macro_stubs: FxHashSet<LocalDefId>,
    /// Traces collected during macro resolution and validated when it's complete.
    single_segment_macro_resolutions:
        Vec<(Ident, MacroKind, ParentScope<'a>, Option<&'a NameBinding<'a>>)>,
    multi_segment_macro_resolutions:
        Vec<(Vec<Segment>, Span, MacroKind, ParentScope<'a>, Option<Res>)>,
    builtin_attrs: Vec<(Ident, ParentScope<'a>)>,
    /// `derive(Copy)` marks items they are applied to so they are treated specially later.
    /// Derive macros cannot modify the item themselves and have to store the markers in the global
    /// context, so they attach the markers to derive container IDs using this resolver table.
    containers_deriving_copy: FxHashSet<LocalExpnId>,
    /// Parent scopes in which the macros were invoked.
    /// FIXME: `derives` are missing in these parent scopes and need to be taken from elsewhere.
    invocation_parent_scopes: FxHashMap<LocalExpnId, ParentScope<'a>>,
    /// `macro_rules` scopes *produced* by expanding the macro invocations,
    /// include all the `macro_rules` items and other invocations generated by them.
    output_macro_rules_scopes: FxHashMap<LocalExpnId, MacroRulesScopeRef<'a>>,
    /// Helper attributes that are in scope for the given expansion.
    helper_attrs: FxHashMap<LocalExpnId, Vec<Ident>>,
    /// Ready or in-progress results of resolving paths inside the `#[derive(...)]` attribute
    /// with the given `ExpnId`.
    derive_data: FxHashMap<LocalExpnId, DeriveData>,
    /// Avoid duplicated errors for "name already defined".
    name_already_seen: FxHashMap<Symbol, Span>,
    potentially_unused_imports: Vec<&'a Import<'a>>,
    /// Table for mapping struct IDs into struct constructor IDs,
    /// it's not used during normal resolution, only for better error reporting.
    /// Also includes of list of each fields visibility
    struct_constructors: DefIdMap<(Res, ty::Visibility, Vec<ty::Visibility>)>,
    /// Features enabled for this crate.
    active_features: FxHashSet<Symbol>,
    lint_buffer: LintBuffer,
    /// Counter backing `next_node_id()`.
    next_node_id: NodeId,
    /// Forward half of the `NodeId` <-> `LocalDefId` mapping (see `create_def`).
    node_id_to_def_id: FxHashMap<ast::NodeId, LocalDefId>,
    /// Reverse half of the `NodeId` <-> `LocalDefId` mapping.
    def_id_to_node_id: IndexVec<LocalDefId, ast::NodeId>,
    /// Indices of unnamed struct or variant fields with unresolved attributes.
    placeholder_field_indices: FxHashMap<NodeId, usize>,
    /// When collecting definitions from an AST fragment produced by a macro invocation `ExpnId`
    /// we know what parent node that fragment should be attached to thanks to this table,
    /// and how the `impl Trait` fragments were introduced.
    invocation_parents: FxHashMap<LocalExpnId, (LocalDefId, ImplTraitContext)>,
    /// Next free disambiguator per `(parent, DefPathData)` key (see `create_def`).
    next_disambiguator: FxHashMap<(LocalDefId, DefPathData), u32>,
    /// Some way to know that we are in a *trait* impl in `visit_assoc_item`.
    /// FIXME: Replace with a more general AST map (together with some other fields).
    trait_impl_items: FxHashSet<LocalDefId>,
    legacy_const_generic_args: FxHashMap<DefId, Option<Vec<usize>>>,
    /// Amount of lifetime parameters for each item in the crate.
    item_generics_num_lifetimes: FxHashMap<LocalDefId, usize>,
    /// Resolution of the crate's `main` function, if one was found.
    main_def: Option<MainDefinition>,
    trait_impls: BTreeMap<DefId, Vec<LocalDefId>>,
    /// A list of proc macro LocalDefIds, written out in the order in which
    /// they are declared in the static array generated by proc_macro_harness.
    proc_macros: Vec<NodeId>,
    confused_type_with_std_module: FxHashMap<Span, Span>,
    /// Computed by `AccessLevelsVisitor` during `resolve_crate`.
    access_levels: AccessLevels,
}
/// Nothing really interesting here; it just provides memory for the rest of the crate.
#[derive(Default)]
pub struct ResolverArenas<'a> {
    /// Arena backing `Module<'a>` references.
    modules: TypedArena<ModuleData<'a>>,
    /// Modules allocated so far that are local (or have no `DefId` at all).
    local_modules: RefCell<Vec<Module<'a>>>,
    imports: TypedArena<Import<'a>>,
    name_resolutions: TypedArena<RefCell<NameResolution<'a>>>,
    ast_paths: TypedArena<ast::Path>,
    /// Catch-all arena for allocations that need no `Drop` (bindings, scopes, spans).
    dropless: DroplessArena,
}
impl<'a> ResolverArenas<'a> {
    /// Allocates a new module, recording it in `local_modules` (when it is
    /// local or has no `DefId`) and in the caller-supplied `module_map`
    /// (when it has a `DefId`).
    fn new_module(
        &'a self,
        parent: Option<Module<'a>>,
        kind: ModuleKind,
        expn_id: ExpnId,
        span: Span,
        no_implicit_prelude: bool,
        module_map: &mut FxHashMap<DefId, Module<'a>>,
    ) -> Module<'a> {
        let module =
            self.modules.alloc(ModuleData::new(parent, kind, expn_id, span, no_implicit_prelude));
        let def_id = module.opt_def_id();
        // Modules without any `DefId` are treated as local.
        if def_id.map_or(true, |def_id| def_id.is_local()) {
            self.local_modules.borrow_mut().push(module);
        }
        if let Some(def_id) = def_id {
            module_map.insert(def_id, module);
        }
        module
    }
    /// Returns all local (or def-less) modules allocated so far.
    fn local_modules(&'a self) -> std::cell::Ref<'a, Vec<Module<'a>>> {
        self.local_modules.borrow()
    }
    /// Moves `name_binding` into the dropless arena and returns a reference to it.
    fn alloc_name_binding(&'a self, name_binding: NameBinding<'a>) -> &'a NameBinding<'a> {
        self.dropless.alloc(name_binding)
    }
    /// Moves `import` into its arena and returns a reference to it.
    fn alloc_import(&'a self, import: Import<'a>) -> &'a Import<'_> {
        self.imports.alloc(import)
    }
    /// Allocates an empty (default) `NameResolution` slot.
    fn alloc_name_resolution(&'a self) -> &'a RefCell<NameResolution<'a>> {
        self.name_resolutions.alloc(Default::default())
    }
    /// Allocates a `macro_rules` scope cell; the `Cell` allows later
    /// "path compression" of scope chains (see `visit_scopes`).
    fn alloc_macro_rules_scope(&'a self, scope: MacroRulesScope<'a>) -> MacroRulesScopeRef<'a> {
        PtrKey(self.dropless.alloc(Cell::new(scope)))
    }
    /// Moves a `macro_rules` binding into the dropless arena.
    fn alloc_macro_rules_binding(
        &'a self,
        binding: MacroRulesBinding<'a>,
    ) -> &'a MacroRulesBinding<'a> {
        self.dropless.alloc(binding)
    }
    /// Clones `paths` into the AST-path arena and returns the arena slice.
    fn alloc_ast_paths(&'a self, paths: &[ast::Path]) -> &'a [ast::Path] {
        self.ast_paths.alloc_from_iter(paths.iter().cloned())
    }
    /// Collects `spans` into an arena-allocated slice.
    fn alloc_pattern_spans(&'a self, spans: impl Iterator<Item = Span>) -> &'a [Span] {
        self.dropless.alloc_from_iter(spans)
    }
}
// Identity impl, letting generic code accept `impl AsMut<Resolver<'_>>`.
impl<'a> AsMut<Resolver<'a>> for Resolver<'a> {
    fn as_mut(&mut self) -> &mut Resolver<'a> {
        self
    }
}
impl<'a, 'b> DefIdTree for &'a Resolver<'b> {
    /// Returns the parent `DefId` of `id`, reading local ids from
    /// `Definitions` and foreign ids from the crate store.
    fn parent(self, id: DefId) -> Option<DefId> {
        match id.as_local() {
            Some(id) => self.definitions.def_key(id).parent,
            None => self.cstore().def_key(id).parent,
        }
        // The def key only stores the parent's index; rebuild a full `DefId`
        // by combining it with the crate of the original id.
        .map(|index| DefId { index, ..id })
    }
}
/// This interface is used through the AST→HIR step, to embed full paths into the HIR. After that
/// the resolver is no longer needed as all the relevant information is inline.
impl ResolverAstLowering for Resolver<'_> {
    /// Looks up the `DefKey` for `id`, locally or in the crate store.
    fn def_key(&mut self, id: DefId) -> DefKey {
        if let Some(id) = id.as_local() {
            self.definitions().def_key(id)
        } else {
            self.cstore().def_key(id)
        }
    }
    #[inline]
    fn def_span(&self, id: LocalDefId) -> Span {
        self.definitions.def_span(id)
    }
    /// Number of lifetime parameters of the item, locally from the table
    /// filled during collection, otherwise from crate metadata.
    fn item_generics_num_lifetimes(&self, def_id: DefId) -> usize {
        if let Some(def_id) = def_id.as_local() {
            self.item_generics_num_lifetimes[&def_id]
        } else {
            self.cstore().item_generics_num_lifetimes(def_id, self.session)
        }
    }
    fn legacy_const_generic_args(&mut self, expr: &Expr) -> Option<Vec<usize>> {
        // NOTE: not self-recursion — inherent methods take precedence over trait
        // methods, so this delegates to the inherent
        // `Resolver::legacy_const_generic_args` defined elsewhere in this crate.
        self.legacy_const_generic_args(expr)
    }
    fn get_partial_res(&self, id: NodeId) -> Option<PartialRes> {
        self.partial_res_map.get(&id).cloned()
    }
    /// Per-namespace resolutions of an import node; empty (`None`s) when absent.
    fn get_import_res(&mut self, id: NodeId) -> PerNS<Option<Res>> {
        self.import_res_map.get(&id).cloned().unwrap_or_default()
    }
    fn get_label_res(&mut self, id: NodeId) -> Option<NodeId> {
        self.label_res_map.get(&id).cloned()
    }
    fn definitions(&mut self) -> &mut Definitions {
        &mut self.definitions
    }
    fn create_stable_hashing_context(&self) -> StableHashingContext<'_> {
        StableHashingContext::new(self.session, &self.definitions, self.crate_loader.cstore())
    }
    fn lint_buffer(&mut self) -> &mut LintBuffer {
        &mut self.lint_buffer
    }
    fn next_node_id(&mut self) -> NodeId {
        // Delegates to the inherent `Resolver::next_node_id` (see below).
        self.next_node_id()
    }
    /// Removes and returns the in-scope trait candidates recorded for `node`.
    fn take_trait_map(&mut self, node: NodeId) -> Option<Vec<TraitCandidate>> {
        self.trait_map.remove(&node)
    }
    fn opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> {
        self.node_id_to_def_id.get(&node).copied()
    }
    /// Like `opt_local_def_id` but panics when `node` has no def id.
    fn local_def_id(&self, node: NodeId) -> LocalDefId {
        self.opt_local_def_id(node).unwrap_or_else(|| panic!("no entry for node id: `{:?}`", node))
    }
    fn def_path_hash(&self, def_id: DefId) -> DefPathHash {
        match def_id.as_local() {
            Some(def_id) => self.definitions.def_path_hash(def_id),
            None => self.cstore().def_path_hash(def_id),
        }
    }
    /// Adds a definition with a parent definition.
    fn create_def(
        &mut self,
        parent: LocalDefId,
        node_id: ast::NodeId,
        data: DefPathData,
        expn_id: ExpnId,
        span: Span,
    ) -> LocalDefId {
        // The format arguments are only evaluated if the assertion fails,
        // so the map index below is safe: it only runs when the key exists.
        assert!(
            !self.node_id_to_def_id.contains_key(&node_id),
            "adding a def'n for node-id {:?} and data {:?} but a previous def'n exists: {:?}",
            node_id,
            data,
            self.definitions.def_key(self.node_id_to_def_id[&node_id]),
        );
        // Find the next free disambiguator for this key.
        let next_disambiguator = &mut self.next_disambiguator;
        let next_disambiguator = |parent, data| {
            let next_disamb = next_disambiguator.entry((parent, data)).or_insert(0);
            let disambiguator = *next_disamb;
            *next_disamb = next_disamb.checked_add(1).expect("disambiguator overflow");
            disambiguator
        };
        let def_id = self.definitions.create_def(parent, data, expn_id, next_disambiguator, span);
        // Some things for which we allocate `LocalDefId`s don't correspond to
        // anything in the AST, so they don't have a `NodeId`. For these cases
        // we don't need a mapping from `NodeId` to `LocalDefId`.
        if node_id != ast::DUMMY_NODE_ID {
            debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
            self.node_id_to_def_id.insert(node_id, def_id);
        }
        assert_eq!(self.def_id_to_node_id.push(node_id), def_id);
        def_id
    }
}
impl<'a> Resolver<'a> {
    /// Creates a resolver for `krate`, wiring up the crate-root module, the
    /// extern prelude, and all bookkeeping tables. `arenas` must outlive the
    /// returned resolver since everything resolution allocates lives there.
    pub fn new(
        session: &'a Session,
        krate: &Crate,
        crate_name: &str,
        metadata_loader: Box<MetadataLoaderDyn>,
        arenas: &'a ResolverArenas<'a>,
    ) -> Resolver<'a> {
        let root_def_id = CRATE_DEF_ID.to_def_id();
        let mut module_map = FxHashMap::default();
        let graph_root = arenas.new_module(
            None,
            ModuleKind::Def(DefKind::Mod, root_def_id, kw::Empty),
            ExpnId::root(),
            krate.span,
            session.contains_name(&krate.attrs, sym::no_implicit_prelude),
            &mut module_map,
        );
        // Throwaway map: the empty module must not be reachable via `module_map`.
        let empty_module = arenas.new_module(
            None,
            ModuleKind::Def(DefKind::Mod, root_def_id, kw::Empty),
            ExpnId::root(),
            DUMMY_SP,
            true,
            &mut FxHashMap::default(),
        );
        let definitions = Definitions::new(session.local_stable_crate_id(), krate.span);
        let root = definitions.get_root_def();
        let mut visibilities = FxHashMap::default();
        visibilities.insert(CRATE_DEF_ID, ty::Visibility::Public);
        let mut def_id_to_node_id = IndexVec::default();
        assert_eq!(def_id_to_node_id.push(CRATE_NODE_ID), root);
        let mut node_id_to_def_id = FxHashMap::default();
        node_id_to_def_id.insert(CRATE_NODE_ID, root);
        let mut invocation_parents = FxHashMap::default();
        invocation_parents.insert(LocalExpnId::ROOT, (root, ImplTraitContext::Existential));
        // Seed the extern prelude from `--extern` flags that opted into it.
        let mut extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'_>> = session
            .opts
            .externs
            .iter()
            .filter(|(_, entry)| entry.add_prelude)
            .map(|(name, _)| (Ident::from_str(name), Default::default()))
            .collect();
        // `core` (and `std`) are implicitly in the extern prelude unless opted out.
        if !session.contains_name(&krate.attrs, sym::no_core) {
            extern_prelude.insert(Ident::with_dummy_span(sym::core), Default::default());
            if !session.contains_name(&krate.attrs, sym::no_std) {
                extern_prelude.insert(Ident::with_dummy_span(sym::std), Default::default());
            }
        }
        let (registered_attrs, registered_tools) =
            macros::registered_attrs_and_tools(session, &krate.attrs);
        let features = session.features_untracked();
        let mut resolver = Resolver {
            session,
            definitions,
            // The outermost module has def ID 0; this is not reflected in the
            // AST.
            graph_root,
            prelude: None,
            extern_prelude,
            has_self: FxHashSet::default(),
            field_names: FxHashMap::default(),
            determined_imports: Vec::new(),
            indeterminate_imports: Vec::new(),
            last_import_segment: false,
            unusable_binding: None,
            pat_span_map: Default::default(),
            partial_res_map: Default::default(),
            import_res_map: Default::default(),
            label_res_map: Default::default(),
            extern_crate_map: Default::default(),
            reexport_map: FxHashMap::default(),
            trait_map: NodeMap::default(),
            underscore_disambiguator: 0,
            empty_module,
            module_map,
            block_map: Default::default(),
            binding_parent_modules: FxHashMap::default(),
            ast_transform_scopes: FxHashMap::default(),
            glob_map: Default::default(),
            visibilities,
            used_imports: FxHashSet::default(),
            maybe_unused_trait_imports: Default::default(),
            maybe_unused_extern_crates: Vec::new(),
            privacy_errors: Vec::new(),
            ambiguity_errors: Vec::new(),
            use_injections: Vec::new(),
            macro_expanded_macro_export_errors: BTreeSet::new(),
            arenas,
            // Stand-in binding used wherever resolution fails (`Res::Err`).
            dummy_binding: arenas.alloc_name_binding(NameBinding {
                kind: NameBindingKind::Res(Res::Err, false),
                ambiguity: None,
                expansion: LocalExpnId::ROOT,
                span: DUMMY_SP,
                vis: ty::Visibility::Public,
            }),
            crate_loader: CrateLoader::new(session, metadata_loader, crate_name),
            macro_names: FxHashSet::default(),
            builtin_macros: Default::default(),
            registered_attrs,
            registered_tools,
            macro_use_prelude: FxHashMap::default(),
            all_macros: FxHashMap::default(),
            macro_map: FxHashMap::default(),
            dummy_ext_bang: Lrc::new(SyntaxExtension::dummy_bang(session.edition())),
            dummy_ext_derive: Lrc::new(SyntaxExtension::dummy_derive(session.edition())),
            non_macro_attr: Lrc::new(SyntaxExtension::non_macro_attr(session.edition())),
            invocation_parent_scopes: Default::default(),
            output_macro_rules_scopes: Default::default(),
            helper_attrs: Default::default(),
            derive_data: Default::default(),
            local_macro_def_scopes: FxHashMap::default(),
            name_already_seen: FxHashMap::default(),
            potentially_unused_imports: Vec::new(),
            struct_constructors: Default::default(),
            unused_macros: Default::default(),
            proc_macro_stubs: Default::default(),
            single_segment_macro_resolutions: Default::default(),
            multi_segment_macro_resolutions: Default::default(),
            builtin_attrs: Default::default(),
            containers_deriving_copy: Default::default(),
            // Both lib and lang features count as "active".
            active_features: features
                .declared_lib_features
                .iter()
                .map(|(feat, ..)| *feat)
                .chain(features.declared_lang_features.iter().map(|(feat, ..)| *feat))
                .collect(),
            lint_buffer: LintBuffer::default(),
            next_node_id: CRATE_NODE_ID,
            node_id_to_def_id,
            def_id_to_node_id,
            placeholder_field_indices: Default::default(),
            invocation_parents,
            next_disambiguator: Default::default(),
            trait_impl_items: Default::default(),
            legacy_const_generic_args: Default::default(),
            item_generics_num_lifetimes: Default::default(),
            main_def: Default::default(),
            trait_impls: Default::default(),
            proc_macros: Default::default(),
            confused_type_with_std_module: Default::default(),
            access_levels: Default::default(),
        };
        // The root invocation is resolved in the crate-root module scope.
        let root_parent_scope = ParentScope::module(graph_root, &resolver);
        resolver.invocation_parent_scopes.insert(LocalExpnId::ROOT, root_parent_scope);
        resolver
    }
fn new_module(
&mut self,
parent: Option<Module<'a>>,
kind: ModuleKind,
expn_id: ExpnId,
span: Span,
no_implicit_prelude: bool,
) -> Module<'a> {
let module_map = &mut self.module_map;
self.arenas.new_module(parent, kind, expn_id, span, no_implicit_prelude, module_map)
}
pub fn next_node_id(&mut self) -> NodeId {
let next =
self.next_node_id.as_u32().checked_add(1).expect("input too large; ran out of NodeIds");
mem::replace(&mut self.next_node_id, ast::NodeId::from_u32(next))
}
    /// Gives mutable access to the buffer of early-buffered lints.
    pub fn lint_buffer(&mut self) -> &mut LintBuffer {
        &mut self.lint_buffer
    }
    /// Creates a fresh set of arenas; kept separate from `new` so the arenas
    /// can outlive the `Resolver` that borrows from them.
    pub fn arenas() -> ResolverArenas<'a> {
        Default::default()
    }
    /// Consumes the resolver, extracting the results of resolution for use by
    /// the rest of the compilation pipeline.
    pub fn into_outputs(self) -> ResolverOutputs {
        // Computed first: `local_def_id` borrows `self` before fields move out.
        let proc_macros = self.proc_macros.iter().map(|id| self.local_def_id(*id)).collect();
        // Move fields out one by one before `self.crate_loader` is consumed below.
        let definitions = self.definitions;
        let visibilities = self.visibilities;
        let extern_crate_map = self.extern_crate_map;
        let reexport_map = self.reexport_map;
        let maybe_unused_trait_imports = self.maybe_unused_trait_imports;
        let maybe_unused_extern_crates = self.maybe_unused_extern_crates;
        let glob_map = self.glob_map;
        let main_def = self.main_def;
        let confused_type_with_std_module = self.confused_type_with_std_module;
        let access_levels = self.access_levels;
        ResolverOutputs {
            definitions,
            cstore: Box::new(self.crate_loader.into_cstore()),
            visibilities,
            access_levels,
            extern_crate_map,
            reexport_map,
            glob_map,
            maybe_unused_trait_imports,
            maybe_unused_extern_crates,
            // Only the name and its origin survive into the outputs.
            extern_prelude: self
                .extern_prelude
                .iter()
                .map(|(ident, entry)| (ident.name, entry.introduced_by_item))
                .collect(),
            main_def,
            trait_impls: self.trait_impls,
            proc_macros,
            confused_type_with_std_module,
            registered_tools: self.registered_tools,
        }
    }
    /// Like `into_outputs`, but clones all tables so the resolver stays usable.
    pub fn clone_outputs(&self) -> ResolverOutputs {
        let proc_macros = self.proc_macros.iter().map(|id| self.local_def_id(*id)).collect();
        ResolverOutputs {
            definitions: self.definitions.clone(),
            access_levels: self.access_levels.clone(),
            cstore: Box::new(self.cstore().clone()),
            visibilities: self.visibilities.clone(),
            extern_crate_map: self.extern_crate_map.clone(),
            reexport_map: self.reexport_map.clone(),
            glob_map: self.glob_map.clone(),
            maybe_unused_trait_imports: self.maybe_unused_trait_imports.clone(),
            maybe_unused_extern_crates: self.maybe_unused_extern_crates.clone(),
            extern_prelude: self
                .extern_prelude
                .iter()
                .map(|(ident, entry)| (ident.name, entry.introduced_by_item))
                .collect(),
            main_def: self.main_def,
            trait_impls: self.trait_impls.clone(),
            proc_macros,
            confused_type_with_std_module: self.confused_type_with_std_module.clone(),
            registered_tools: self.registered_tools.clone(),
        }
    }
    /// Shorthand for the crate store owned by the crate loader.
    pub fn cstore(&self) -> &CStore {
        self.crate_loader.cstore()
    }
fn dummy_ext(&self, macro_kind: MacroKind) -> Lrc<SyntaxExtension> {
match macro_kind {
MacroKind::Bang => self.dummy_ext_bang.clone(),
MacroKind::Derive => self.dummy_ext_derive.clone(),
MacroKind::Attr => self.non_macro_attr.clone(),
}
}
    /// Runs the function on each namespace.
    fn per_ns<F: FnMut(&mut Self, Namespace)>(&mut self, mut f: F) {
        // Fixed order: types, then values, then macros.
        f(self, TypeNS);
        f(self, ValueNS);
        f(self, MacroNS);
    }
fn is_builtin_macro(&mut self, res: Res) -> bool {
self.get_macro(res).map_or(false, |ext| ext.builtin_name.is_some())
}
    /// Climbs the expansion ancestry of `ctxt` until it reaches an expansion
    /// produced by a macro with a known `DefId`, and returns that id.
    ///
    /// NOTE(review): assumes some ancestor expansion carries a `macro_def_id`
    /// (otherwise the loop would not terminate) — presumably guaranteed by
    /// callers passing macro-produced contexts; confirm at call sites.
    fn macro_def(&self, mut ctxt: SyntaxContext) -> DefId {
        loop {
            match ctxt.outer_expn_data().macro_def_id {
                Some(def_id) => return def_id,
                // Strip the outermost mark and keep looking at the parent.
                None => ctxt.remove_mark(),
            };
        }
    }
    /// Entry point to crate resolution.
    pub fn resolve_crate(&mut self, krate: &Crate) {
        self.session.time("resolve_crate", || {
            // Pass order is significant: imports are finalized first, then the
            // passes that consume their results, then checks and error reporting.
            self.session.time("finalize_imports", || ImportResolver { r: self }.finalize_imports());
            self.session.time("resolve_access_levels", || {
                AccessLevelsVisitor::compute_access_levels(self, krate)
            });
            self.session.time("finalize_macro_resolutions", || self.finalize_macro_resolutions());
            self.session.time("late_resolve_crate", || self.late_resolve_crate(krate));
            self.session.time("resolve_main", || self.resolve_main());
            self.session.time("resolve_check_unused", || self.check_unused(krate));
            self.session.time("resolve_report_errors", || self.report_errors(krate));
            self.session.time("resolve_postprocess", || self.crate_loader.postprocess(krate));
        });
    }
    /// Collects trait candidates visible at the given position, pruned (on a
    /// best-effort basis) to traits that may provide `assoc_item`.
    /// `current_trait` is the enclosing trait module, if any.
    pub fn traits_in_scope(
        &mut self,
        current_trait: Option<Module<'a>>,
        parent_scope: &ParentScope<'a>,
        ctxt: SyntaxContext,
        assoc_item: Option<(Symbol, Namespace)>,
    ) -> Vec<TraitCandidate> {
        let mut found_traits = Vec::new();
        // The enclosing trait itself is a candidate (with no import chain).
        if let Some(module) = current_trait {
            if self.trait_may_have_item(Some(module), assoc_item) {
                let def_id = module.def_id();
                found_traits.push(TraitCandidate { def_id, import_ids: smallvec![] });
            }
        }
        // Walk the type-namespace scope chain and harvest traits from each
        // module scope; the visitor closure never breaks (returns `None`).
        self.visit_scopes(ScopeSet::All(TypeNS, false), parent_scope, ctxt, |this, scope, _, _| {
            match scope {
                Scope::Module(module, _) => {
                    this.traits_in_module(module, assoc_item, &mut found_traits);
                }
                Scope::StdLibPrelude => {
                    if let Some(module) = this.prelude {
                        this.traits_in_module(module, assoc_item, &mut found_traits);
                    }
                }
                Scope::ExternPrelude | Scope::ToolPrelude | Scope::BuiltinTypes => {}
                _ => unreachable!(),
            }
            None::<()>
        });
        found_traits
    }
    /// Appends to `found_traits` every trait in `module` that may provide
    /// `assoc_item`, together with the import chain that brought it in scope.
    fn traits_in_module(
        &mut self,
        module: Module<'a>,
        assoc_item: Option<(Symbol, Namespace)>,
        found_traits: &mut Vec<TraitCandidate>,
    ) {
        // The per-module trait list is computed lazily.
        module.ensure_traits(self);
        let traits = module.traits.borrow();
        for (trait_name, trait_binding) in traits.as_ref().unwrap().iter() {
            if self.trait_may_have_item(trait_binding.module(), assoc_item) {
                let def_id = trait_binding.res().def_id();
                let import_ids = self.find_transitive_imports(&trait_binding.kind, *trait_name);
                found_traits.push(TraitCandidate { def_id, import_ids });
            }
        }
    }
// List of traits in scope is pruned on best effort basis. We reject traits not having an
// associated item with the given name and namespace (if specified). This is a conservative
// optimization, proper hygienic type-based resolution of associated items is done in typeck.
// We don't reject trait aliases (`trait_module == None`) because we don't have access to their
// associated items.
fn trait_may_have_item(
&mut self,
trait_module: Option<Module<'a>>,
assoc_item: Option<(Symbol, Namespace)>,
) -> bool {
match (trait_module, assoc_item) {
(Some(trait_module), Some((name, ns))) => {
self.resolutions(trait_module).borrow().iter().any(|resolution| {
let (&BindingKey { ident: assoc_ident, ns: assoc_ns, .. }, _) = resolution;
assoc_ns == ns && assoc_ident.name == name
})
}
_ => true,
}
}
    /// Walks the chain of imports behind `kind`, marking each as a
    /// possibly-used trait import, recording glob uses of `trait_name`, and
    /// collecting the imports' local def ids (outermost first).
    fn find_transitive_imports(
        &mut self,
        mut kind: &NameBindingKind<'_>,
        trait_name: Ident,
    ) -> SmallVec<[LocalDefId; 1]> {
        let mut import_ids = smallvec![];
        // Each re-export wraps the next binding; follow until a non-import.
        while let NameBindingKind::Import { import, binding, .. } = kind {
            let id = self.local_def_id(import.id);
            self.maybe_unused_trait_imports.insert(id);
            self.add_to_glob_map(&import, trait_name);
            import_ids.push(id);
            kind = &binding.kind;
        }
        import_ids
    }
fn new_key(&mut self, ident: Ident, ns: Namespace) -> BindingKey {
let ident = ident.normalize_to_macros_2_0();
let disambiguator = if ident.name == kw::Underscore {
self.underscore_disambiguator += 1;
self.underscore_disambiguator
} else {
0
};
BindingKey { ident, ns, disambiguator }
}
    /// Returns `module`'s resolution table, first populating it from
    /// external-crate metadata if this is the first access.
    fn resolutions(&mut self, module: Module<'a>) -> &'a Resolutions<'a> {
        if module.populate_on_access.get() {
            // Clear the flag before building so this runs only once.
            module.populate_on_access.set(false);
            self.build_reduced_graph_external(module);
        }
        &module.lazy_resolutions
    }
    /// Returns the `NameResolution` slot for `key` in `module`, allocating
    /// an empty one in the arenas on first use.
    fn resolution(
        &mut self,
        module: Module<'a>,
        key: BindingKey,
    ) -> &'a RefCell<NameResolution<'a>> {
        *self
            .resolutions(module)
            .borrow_mut()
            .entry(key)
            .or_insert_with(|| self.arenas.alloc_name_resolution())
    }
    /// Marks `used_binding` — and, by recursing through `NameBindingKind::Import`,
    /// the whole import chain behind it — as used, and records any ambiguity
    /// attached to the binding for later reporting.
    fn record_use(
        &mut self,
        ident: Ident,
        used_binding: &'a NameBinding<'a>,
        is_lexical_scope: bool,
    ) {
        if let Some((b2, kind)) = used_binding.ambiguity {
            self.ambiguity_errors.push(AmbiguityError {
                kind,
                ident,
                b1: used_binding,
                b2,
                misc1: AmbiguityErrorMisc::None,
                misc2: AmbiguityErrorMisc::None,
            });
        }
        if let NameBindingKind::Import { import, binding, ref used } = used_binding.kind {
            // Avoid marking `extern crate` items that refer to a name from extern prelude,
            // but not introduce it, as used if they are accessed from lexical scope.
            if is_lexical_scope {
                if let Some(entry) = self.extern_prelude.get(&ident.normalize_to_macros_2_0()) {
                    if let Some(crate_item) = entry.extern_crate_item {
                        if ptr::eq(used_binding, crate_item) && !entry.introduced_by_item {
                            return;
                        }
                    }
                }
            }
            used.set(true);
            import.used.set(true);
            self.used_imports.insert(import.id);
            self.add_to_glob_map(&import, ident);
            // Recurse into the underlying binding (no longer a lexical access).
            self.record_use(ident, binding, false);
        }
    }
    /// If `import` is a glob import, records in `glob_map` that `ident` was
    /// actually imported through it.
    #[inline]
    fn add_to_glob_map(&mut self, import: &Import<'_>, ident: Ident) {
        if import.is_glob() {
            let def_id = self.local_def_id(import.id);
            self.glob_map.entry(def_id).or_default().insert(ident.name);
        }
    }
    /// A generic scope visitor.
    /// Visits scopes in order to resolve some identifier in them or perform other actions.
    /// If the callback returns `Some` result, we stop visiting scopes and return it.
    fn visit_scopes<T>(
        &mut self,
        scope_set: ScopeSet<'a>,
        parent_scope: &ParentScope<'a>,
        ctxt: SyntaxContext,
        mut visitor: impl FnMut(
            &mut Self,
            Scope<'a>,
            /*use_prelude*/ bool,
            SyntaxContext,
        ) -> Option<T>,
    ) -> Option<T> {
        // General principles:
        // 1. Not controlled (user-defined) names should have higher priority than controlled names
        //    built into the language or standard library. This way we can add new names into the
        //    language or standard library without breaking user code.
        // 2. "Closed set" below means new names cannot appear after the current resolution attempt.
        // Places to search (in order of decreasing priority):
        // (Type NS)
        // 1. FIXME: Ribs (type parameters), there's no necessary infrastructure yet
        //    (open set, not controlled).
        // 2. Names in modules (both normal `mod`ules and blocks), loop through hygienic parents
        //    (open, not controlled).
        // 3. Extern prelude (open, the open part is from macro expansions, not controlled).
        // 4. Tool modules (closed, controlled right now, but not in the future).
        // 5. Standard library prelude (de-facto closed, controlled).
        // 6. Language prelude (closed, controlled).
        // (Value NS)
        // 1. FIXME: Ribs (local variables), there's no necessary infrastructure yet
        //    (open set, not controlled).
        // 2. Names in modules (both normal `mod`ules and blocks), loop through hygienic parents
        //    (open, not controlled).
        // 3. Standard library prelude (de-facto closed, controlled).
        // (Macro NS)
        // 1-3. Derive helpers (open, not controlled). All ambiguities with other names
        //    are currently reported as errors. They should be higher in priority than preludes
        //    and probably even names in modules according to the "general principles" above. They
        //    also should be subject to restricted shadowing because are effectively produced by
        //    derives (you need to resolve the derive first to add helpers into scope), but they
        //    should be available before the derive is expanded for compatibility.
        //    It's mess in general, so we are being conservative for now.
        // 1-3. `macro_rules` (open, not controlled), loop through `macro_rules` scopes. Have higher
        //    priority than prelude macros, but create ambiguities with macros in modules.
        // 1-3. Names in modules (both normal `mod`ules and blocks), loop through hygienic parents
        //    (open, not controlled). Have higher priority than prelude macros, but create
        //    ambiguities with `macro_rules`.
        // 4. `macro_use` prelude (open, the open part is from macro expansions, not controlled).
        // 4a. User-defined prelude from macro-use
        //    (open, the open part is from macro expansions, not controlled).
        // 4b. "Standard library prelude" part implemented through `macro-use` (closed, controlled).
        // 4c. Standard library prelude (de-facto closed, controlled).
        // 6. Language prelude: builtin attributes (closed, controlled).
        let rust_2015 = ctxt.edition() == Edition::Edition2015;
        // Decode the scope set into namespace, macro kind and path-absoluteness.
        let (ns, macro_kind, is_absolute_path) = match scope_set {
            ScopeSet::All(ns, _) => (ns, None, false),
            ScopeSet::AbsolutePath(ns) => (ns, None, true),
            ScopeSet::Macro(macro_kind) => (MacroNS, Some(macro_kind), false),
            ScopeSet::Late(ns, ..) => (ns, None, false),
        };
        let module = match scope_set {
            // Start with the specified module.
            ScopeSet::Late(_, module, _) => module,
            // Jump out of trait or enum modules, they do not act as scopes.
            _ => parent_scope.module.nearest_item_scope(),
        };
        // Pick the innermost scope to start from based on the namespace.
        let mut scope = match ns {
            _ if is_absolute_path => Scope::CrateRoot,
            TypeNS | ValueNS => Scope::Module(module, None),
            MacroNS => Scope::DeriveHelpers(parent_scope.expansion),
        };
        let mut ctxt = ctxt.normalize_to_macros_2_0();
        let mut use_prelude = !module.no_implicit_prelude;
        // Main loop: decide whether to visit the current scope, then step to
        // the next scope in the priority chain until a scope chain ends.
        loop {
            let visit = match scope {
                // Derive helpers are not in scope when resolving derives in the same container.
                Scope::DeriveHelpers(expn_id) => {
                    !(expn_id == parent_scope.expansion && macro_kind == Some(MacroKind::Derive))
                }
                Scope::DeriveHelpersCompat => true,
                Scope::MacroRules(macro_rules_scope) => {
                    // Use "path compression" on `macro_rules` scope chains. This is an optimization
                    // used to avoid long scope chains, see the comments on `MacroRulesScopeRef`.
                    // As another consequence of this optimization visitors never observe invocation
                    // scopes for macros that were already expanded.
                    while let MacroRulesScope::Invocation(invoc_id) = macro_rules_scope.get() {
                        if let Some(next_scope) = self.output_macro_rules_scopes.get(&invoc_id) {
                            macro_rules_scope.set(next_scope.get());
                        } else {
                            break;
                        }
                    }
                    true
                }
                Scope::CrateRoot => true,
                Scope::Module(..) => true,
                Scope::RegisteredAttrs => use_prelude,
                Scope::MacroUsePrelude => use_prelude || rust_2015,
                Scope::BuiltinAttrs => true,
                Scope::ExternPrelude => use_prelude || is_absolute_path,
                Scope::ToolPrelude => use_prelude,
                Scope::StdLibPrelude => use_prelude || ns == MacroNS,
                Scope::BuiltinTypes => true,
            };
            if visit {
                if let break_result @ Some(..) = visitor(self, scope, use_prelude, ctxt) {
                    return break_result;
                }
            }
            // Transition to the next scope; `break` means the chain is exhausted.
            scope = match scope {
                Scope::DeriveHelpers(LocalExpnId::ROOT) => Scope::DeriveHelpersCompat,
                Scope::DeriveHelpers(expn_id) => {
                    // Derive helpers are not visible to code generated by bang or derive macros.
                    let expn_data = expn_id.expn_data();
                    match expn_data.kind {
                        ExpnKind::Root
                        | ExpnKind::Macro(MacroKind::Bang | MacroKind::Derive, _) => {
                            Scope::DeriveHelpersCompat
                        }
                        _ => Scope::DeriveHelpers(expn_data.parent.expect_local()),
                    }
                }
                Scope::DeriveHelpersCompat => Scope::MacroRules(parent_scope.macro_rules),
                Scope::MacroRules(macro_rules_scope) => match macro_rules_scope.get() {
                    MacroRulesScope::Binding(binding) => {
                        Scope::MacroRules(binding.parent_macro_rules_scope)
                    }
                    MacroRulesScope::Invocation(invoc_id) => {
                        Scope::MacroRules(self.invocation_parent_scopes[&invoc_id].macro_rules)
                    }
                    MacroRulesScope::Empty => Scope::Module(module, None),
                },
                Scope::CrateRoot => match ns {
                    TypeNS => {
                        ctxt.adjust(ExpnId::root());
                        Scope::ExternPrelude
                    }
                    ValueNS | MacroNS => break,
                },
                Scope::Module(module, prev_lint_id) => {
                    use_prelude = !module.no_implicit_prelude;
                    let derive_fallback_lint_id = match scope_set {
                        ScopeSet::Late(.., lint_id) => lint_id,
                        _ => None,
                    };
                    match self.hygienic_lexical_parent(module, &mut ctxt, derive_fallback_lint_id) {
                        Some((parent_module, lint_id)) => {
                            Scope::Module(parent_module, lint_id.or(prev_lint_id))
                        }
                        None => {
                            ctxt.adjust(ExpnId::root());
                            match ns {
                                TypeNS => Scope::ExternPrelude,
                                ValueNS => Scope::StdLibPrelude,
                                MacroNS => Scope::RegisteredAttrs,
                            }
                        }
                    }
                }
                Scope::RegisteredAttrs => Scope::MacroUsePrelude,
                Scope::MacroUsePrelude => Scope::StdLibPrelude,
                Scope::BuiltinAttrs => break, // nowhere else to search
                Scope::ExternPrelude if is_absolute_path => break,
                Scope::ExternPrelude => Scope::ToolPrelude,
                Scope::ToolPrelude => Scope::StdLibPrelude,
                Scope::StdLibPrelude => match ns {
                    TypeNS => Scope::BuiltinTypes,
                    ValueNS => break, // nowhere else to search
                    MacroNS => Scope::BuiltinAttrs,
                },
                Scope::BuiltinTypes => break, // nowhere else to search
            };
        }
        None
    }
    /// This resolves the identifier `ident` in the namespace `ns` in the current lexical scope.
    /// More specifically, we proceed up the hierarchy of scopes and return the binding for
    /// `ident` in the first scope that defines it (or None if no scopes define it).
    ///
    /// A block's items are above its local variables in the scope hierarchy, regardless of where
    /// the items are defined in the block. For example,
    /// ```rust
    /// fn f() {
    ///    g(); // Since there are no local variables in scope yet, this resolves to the item.
    ///    let g = || {};
    ///    fn g() {}
    ///    g(); // This resolves to the local variable `g` since it shadows the item.
    /// }
    /// ```
    ///
    /// Invariant: This must only be called during main resolution, not during
    /// import resolution.
    fn resolve_ident_in_lexical_scope(
        &mut self,
        mut ident: Ident,
        ns: Namespace,
        parent_scope: &ParentScope<'a>,
        record_used_id: Option<NodeId>,
        path_span: Span,
        ribs: &[Rib<'a>],
    ) -> Option<LexicalScopeBinding<'a>> {
        assert!(ns == TypeNS || ns == ValueNS);
        let orig_ident = ident;
        if ident.name == kw::Empty {
            return Some(LexicalScopeBinding::Res(Res::Err));
        }
        // Pick the hygiene normalization for the lookup span. `general_span` is used
        // for the actual rib walk; `normalized_span` is used when a rib contains
        // parameters (macros-2.0 hygiene).
        let (general_span, normalized_span) = if ident.name == kw::SelfUpper {
            // FIXME(jseyfried) improve `Self` hygiene
            let empty_span = ident.span.with_ctxt(SyntaxContext::root());
            (empty_span, empty_span)
        } else if ns == TypeNS {
            let normalized_span = ident.span.normalize_to_macros_2_0();
            (normalized_span, normalized_span)
        } else {
            (ident.span.normalize_to_macro_rules(), ident.span.normalize_to_macros_2_0())
        };
        ident.span = general_span;
        let normalized_ident = Ident { span: normalized_span, ..ident };
        // Walk backwards up the ribs in scope.
        let record_used = record_used_id.is_some();
        let mut module = self.graph_root;
        for i in (0..ribs.len()).rev() {
            debug!("walk rib\n{:?}", ribs[i].bindings);
            // Use the rib kind to determine whether we are resolving parameters
            // (macro 2.0 hygiene) or local variables (`macro_rules` hygiene).
            let rib_ident = if ribs[i].kind.contains_params() { normalized_ident } else { ident };
            if let Some((original_rib_ident_def, res)) = ribs[i].bindings.get_key_value(&rib_ident)
            {
                // The ident resolves to a type parameter or local variable.
                return Some(LexicalScopeBinding::Res(self.validate_res_from_ribs(
                    i,
                    rib_ident,
                    *res,
                    record_used,
                    path_span,
                    *original_rib_ident_def,
                    ribs,
                )));
            }
            module = match ribs[i].kind {
                ModuleRibKind(module) => module,
                MacroDefinition(def) if def == self.macro_def(ident.span.ctxt()) => {
                    // If an invocation of this macro created `ident`, give up on `ident`
                    // and switch to `ident`'s source from the macro definition.
                    ident.span.remove_mark();
                    continue;
                }
                _ => continue,
            };
            match module.kind {
                ModuleKind::Block(..) => {} // We can see through blocks
                _ => break,
            }
            let item = self.resolve_ident_in_module_unadjusted(
                ModuleOrUniformRoot::Module(module),
                ident,
                ns,
                parent_scope,
                record_used,
                path_span,
            );
            if let Ok(binding) = item {
                // The ident resolves to an item.
                return Some(LexicalScopeBinding::Item(binding));
            }
        }
        // No rib defined the ident: fall back to the scopes outside the rib stack
        // (enclosing modules, preludes, builtins, ...).
        self.early_resolve_ident_in_lexical_scope(
            orig_ident,
            ScopeSet::Late(ns, module, record_used_id),
            parent_scope,
            record_used,
            record_used,
            path_span,
        )
        .ok()
        .map(LexicalScopeBinding::Item)
    }
    /// Returns the module in which lexical resolution should continue after `module`
    /// has been searched, paired with an optional lint node id when the parent is only
    /// reachable via the proc-macro-derive back-compat fallback below.
    /// Returns `None` when there is no further module to search.
    fn hygienic_lexical_parent(
        &mut self,
        module: Module<'a>,
        ctxt: &mut SyntaxContext,
        derive_fallback_lint_id: Option<NodeId>,
    ) -> Option<(Module<'a>, Option<NodeId>)> {
        // If the module's expansion is not a descendant of `ctxt`, jump to the scope
        // of the expansion that produced `ctxt`'s outermost mark.
        if !module.expansion.outer_expn_is_descendant_of(*ctxt) {
            return Some((self.expn_def_scope(ctxt.remove_mark()), None));
        }
        // Blocks are transparent for item resolution; continue in the nearest item scope.
        if let ModuleKind::Block(..) = module.kind {
            return Some((module.parent.unwrap().nearest_item_scope(), None));
        }
        // We need to support the next case under a deprecation warning
        // ```
        // struct MyStruct;
        // ---- begin: this comes from a proc macro derive
        // mod implementation_details {
        //     // Note that `MyStruct` is not in scope here.
        //     impl SomeTrait for MyStruct { ... }
        // }
        // ---- end
        // ```
        // So we have to fall back to the module's parent during lexical resolution in this case.
        if derive_fallback_lint_id.is_some() {
            if let Some(parent) = module.parent {
                // Inner module is inside the macro, parent module is outside of the macro.
                if module.expansion != parent.expansion
                    && module.expansion.is_descendant_of(parent.expansion)
                {
                    // The macro is a proc macro derive
                    if let Some(def_id) = module.expansion.expn_data().macro_def_id {
                        let ext = self.get_macro_by_def_id(def_id);
                        if ext.builtin_name.is_none()
                            && ext.macro_kind() == MacroKind::Derive
                            && parent.expansion.outer_expn_is_descendant_of(*ctxt)
                        {
                            return Some((parent, derive_fallback_lint_id));
                        }
                    }
                }
            }
        }
        None
    }
fn resolve_ident_in_module(
&mut self,
module: ModuleOrUniformRoot<'a>,
ident: Ident,
ns: Namespace,
parent_scope: &ParentScope<'a>,
record_used: bool,
path_span: Span,
) -> Result<&'a NameBinding<'a>, Determinacy> {
self.resolve_ident_in_module_ext(module, ident, ns, parent_scope, record_used, path_span)
.map_err(|(determinacy, _)| determinacy)
}
    /// Like `resolve_ident_in_module_unadjusted_ext`, but first adjusts the ident's
    /// hygiene context and the parent scope according to the kind of module root.
    /// The error carries both a `Determinacy` and a `Weak` component.
    fn resolve_ident_in_module_ext(
        &mut self,
        module: ModuleOrUniformRoot<'a>,
        mut ident: Ident,
        ns: Namespace,
        parent_scope: &ParentScope<'a>,
        record_used: bool,
        path_span: Span,
    ) -> Result<&'a NameBinding<'a>, (Determinacy, Weak)> {
        // `tmp_parent_scope` lives in this frame so `adjusted_parent_scope` can
        // borrow it when an adjustment is needed.
        let tmp_parent_scope;
        let mut adjusted_parent_scope = parent_scope;
        match module {
            ModuleOrUniformRoot::Module(m) => {
                // Adjust the ident for the module's expansion; when a definition
                // expansion is returned, resolve relative to that expansion's scope.
                if let Some(def) = ident.span.normalize_to_macros_2_0_and_adjust(m.expansion) {
                    tmp_parent_scope =
                        ParentScope { module: self.expn_def_scope(def), ..*parent_scope };
                    adjusted_parent_scope = &tmp_parent_scope;
                }
            }
            ModuleOrUniformRoot::ExternPrelude => {
                ident.span.normalize_to_macros_2_0_and_adjust(ExpnId::root());
            }
            ModuleOrUniformRoot::CrateRootAndExternPrelude | ModuleOrUniformRoot::CurrentScope => {
                // No adjustments
            }
        }
        self.resolve_ident_in_module_unadjusted_ext(
            module,
            ident,
            ns,
            adjusted_parent_scope,
            false,
            record_used,
            path_span,
        )
    }
    /// Resolves a crate-root path segment (`crate`, `::`, or `$crate`) to the module
    /// it refers to, taking the ident's hygiene marks into account. `$crate` requires
    /// walking the mark chain to find which crate the macro was defined in.
    fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {
        debug!("resolve_crate_root({:?})", ident);
        let mut ctxt = ident.span.ctxt();
        let mark = if ident.name == kw::DollarCrate {
            // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
            // we don't want to pretend that the `macro_rules!` definition is in the `macro`
            // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
            // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
            // definitions actually produced by `macro` and `macro` definitions produced by
            // `macro_rules!`, but at least such configurations are not stable yet.
            ctxt = ctxt.normalize_to_macro_rules();
            debug!(
                "resolve_crate_root: marks={:?}",
                ctxt.marks().into_iter().map(|(i, t)| (i.expn_data(), t)).collect::<Vec<_>>()
            );
            let mut iter = ctxt.marks().into_iter().rev().peekable();
            let mut result = None;
            // Find the last opaque mark from the end if it exists.
            while let Some(&(mark, transparency)) = iter.peek() {
                if transparency == Transparency::Opaque {
                    result = Some(mark);
                    iter.next();
                } else {
                    break;
                }
            }
            debug!(
                "resolve_crate_root: found opaque mark {:?} {:?}",
                result,
                result.map(|r| r.expn_data())
            );
            // Then find the last semi-transparent mark from the end if it exists.
            for (mark, transparency) in iter {
                if transparency == Transparency::SemiTransparent {
                    result = Some(mark);
                } else {
                    break;
                }
            }
            debug!(
                "resolve_crate_root: found semi-transparent mark {:?} {:?}",
                result,
                result.map(|r| r.expn_data())
            );
            result
        } else {
            debug!("resolve_crate_root: not DollarCrate");
            ctxt = ctxt.normalize_to_macros_2_0();
            ctxt.adjust(ExpnId::root())
        };
        let module = match mark {
            Some(def) => self.expn_def_scope(def),
            None => {
                debug!(
                    "resolve_crate_root({:?}): found no mark (ident.span = {:?})",
                    ident, ident.span
                );
                return self.graph_root;
            }
        };
        // Map the expansion's definition scope to the root module of its crate.
        let module = self.expect_module(
            module.opt_def_id().map_or(LOCAL_CRATE, |def_id| def_id.krate).as_def_id(),
        );
        debug!(
            "resolve_crate_root({:?}): got module {:?} ({:?}) (ident.span = {:?})",
            ident,
            module,
            module.kind.name(),
            ident.span
        );
        module
    }
fn resolve_self(&mut self, ctxt: &mut SyntaxContext, module: Module<'a>) -> Module<'a> {
let mut module = self.expect_module(module.nearest_parent_mod());
while module.span.ctxt().normalize_to_macros_2_0() != *ctxt {
let parent = module.parent.unwrap_or_else(|| self.expn_def_scope(ctxt.remove_mark()));
module = self.expect_module(parent.nearest_parent_mod());
}
module
}
fn resolve_path(
&mut self,
path: &[Segment],
opt_ns: Option<Namespace>, // `None` indicates a module path in import
parent_scope: &ParentScope<'a>,
record_used: bool,
path_span: Span,
crate_lint: CrateLint,
) -> PathResult<'a> {
self.resolve_path_with_ribs(
path,
opt_ns,
parent_scope,
record_used,
path_span,
crate_lint,
None,
)
}
    /// Resolves a multi-segment `path`, threading the intermediate module from one
    /// segment to the next. The last segment is looked up in `opt_ns` (or `TypeNS`
    /// for a module path); all earlier segments are looked up in `TypeNS`. When
    /// `ribs` is provided, single-segment lookups also consult local ribs.
    fn resolve_path_with_ribs(
        &mut self,
        path: &[Segment],
        opt_ns: Option<Namespace>, // `None` indicates a module path in import
        parent_scope: &ParentScope<'a>,
        record_used: bool,
        path_span: Span,
        crate_lint: CrateLint,
        ribs: Option<&PerNS<Vec<Rib<'a>>>>,
    ) -> PathResult<'a> {
        let mut module = None;
        let mut allow_super = true;
        let mut second_binding = None;
        debug!(
            "resolve_path(path={:?}, opt_ns={:?}, record_used={:?}, \
             path_span={:?}, crate_lint={:?})",
            path, opt_ns, record_used, path_span, crate_lint,
        );
        for (i, &Segment { ident, id, has_generic_args: _ }) in path.iter().enumerate() {
            debug!("resolve_path ident {} {:?} {:?}", i, ident, id);
            let record_segment_res = |this: &mut Self, res| {
                if record_used {
                    if let Some(id) = id {
                        if !this.partial_res_map.contains_key(&id) {
                            assert!(id != ast::DUMMY_NODE_ID, "Trying to resolve dummy id");
                            this.record_partial_res(id, PartialRes::new(res));
                        }
                    }
                }
            };
            let is_last = i == path.len() - 1;
            let ns = if is_last { opt_ns.unwrap_or(TypeNS) } else { TypeNS };
            let name = ident.name;
            // `super` chains are only allowed as an uninterrupted prefix of the path.
            allow_super &= ns == TypeNS && (name == kw::SelfLower || name == kw::Super);
            if ns == TypeNS {
                if allow_super && name == kw::Super {
                    let mut ctxt = ident.span.ctxt().normalize_to_macros_2_0();
                    let self_module = match i {
                        0 => Some(self.resolve_self(&mut ctxt, parent_scope.module)),
                        _ => match module {
                            Some(ModuleOrUniformRoot::Module(module)) => Some(module),
                            _ => None,
                        },
                    };
                    if let Some(self_module) = self_module {
                        if let Some(parent) = self_module.parent {
                            module = Some(ModuleOrUniformRoot::Module(
                                self.resolve_self(&mut ctxt, parent),
                            ));
                            continue;
                        }
                    }
                    let msg = "there are too many leading `super` keywords".to_string();
                    return PathResult::Failed {
                        span: ident.span,
                        label: msg,
                        suggestion: None,
                        is_error_from_last_segment: false,
                    };
                }
                // Path-root keywords (`self`, `::`, `crate`, `$crate`) are only
                // meaningful as the first segment.
                if i == 0 {
                    if name == kw::SelfLower {
                        let mut ctxt = ident.span.ctxt().normalize_to_macros_2_0();
                        module = Some(ModuleOrUniformRoot::Module(
                            self.resolve_self(&mut ctxt, parent_scope.module),
                        ));
                        continue;
                    }
                    if name == kw::PathRoot && ident.span.rust_2018() {
                        module = Some(ModuleOrUniformRoot::ExternPrelude);
                        continue;
                    }
                    if name == kw::PathRoot && ident.span.rust_2015() && self.session.rust_2018() {
                        // `::a::b` from 2015 macro on 2018 global edition
                        module = Some(ModuleOrUniformRoot::CrateRootAndExternPrelude);
                        continue;
                    }
                    if name == kw::PathRoot || name == kw::Crate || name == kw::DollarCrate {
                        // `::a::b`, `crate::a::b` or `$crate::a::b`
                        module = Some(ModuleOrUniformRoot::Module(self.resolve_crate_root(ident)));
                        continue;
                    }
                }
            }
            // Report special messages for path segment keywords in wrong positions.
            if ident.is_path_segment_keyword() && i != 0 {
                let name_str = if name == kw::PathRoot {
                    "crate root".to_string()
                } else {
                    format!("`{}`", name)
                };
                let label = if i == 1 && path[0].ident.name == kw::PathRoot {
                    format!("global paths cannot start with {}", name_str)
                } else {
                    format!("{} in paths can only be used in start position", name_str)
                };
                return PathResult::Failed {
                    span: ident.span,
                    label,
                    suggestion: None,
                    is_error_from_last_segment: false,
                };
            }
            enum FindBindingResult<'a> {
                Binding(Result<&'a NameBinding<'a>, Determinacy>),
                PathResult(PathResult<'a>),
            }
            // Look up the segment in `ns`: inside the current `module` if one has been
            // established, otherwise via early (rib-less) or lexical (rib-aware) scope
            // resolution for the first segment.
            let find_binding_in_ns = |this: &mut Self, ns| {
                let binding = if let Some(module) = module {
                    this.resolve_ident_in_module(
                        module,
                        ident,
                        ns,
                        parent_scope,
                        record_used,
                        path_span,
                    )
                } else if ribs.is_none() || opt_ns.is_none() || opt_ns == Some(MacroNS) {
                    let scopes = ScopeSet::All(ns, opt_ns.is_none());
                    this.early_resolve_ident_in_lexical_scope(
                        ident,
                        scopes,
                        parent_scope,
                        record_used,
                        record_used,
                        path_span,
                    )
                } else {
                    let record_used_id = if record_used {
                        crate_lint.node_id().or(Some(CRATE_NODE_ID))
                    } else {
                        None
                    };
                    match this.resolve_ident_in_lexical_scope(
                        ident,
                        ns,
                        parent_scope,
                        record_used_id,
                        path_span,
                        &ribs.unwrap()[ns],
                    ) {
                        // we found a locally-imported or available item/module
                        Some(LexicalScopeBinding::Item(binding)) => Ok(binding),
                        // we found a local variable or type param
                        Some(LexicalScopeBinding::Res(res))
                            if opt_ns == Some(TypeNS) || opt_ns == Some(ValueNS) =>
                        {
                            record_segment_res(this, res);
                            return FindBindingResult::PathResult(PathResult::NonModule(
                                PartialRes::with_unresolved_segments(res, path.len() - 1),
                            ));
                        }
                        _ => Err(Determinacy::determined(record_used)),
                    }
                };
                FindBindingResult::Binding(binding)
            };
            let binding = match find_binding_in_ns(self, ns) {
                FindBindingResult::PathResult(x) => return x,
                FindBindingResult::Binding(binding) => binding,
            };
            match binding {
                Ok(binding) => {
                    if i == 1 {
                        second_binding = Some(binding);
                    }
                    let res = binding.res();
                    let maybe_assoc = opt_ns != Some(MacroNS) && PathSource::Type.is_expected(res);
                    if let Some(next_module) = binding.module() {
                        // The segment is a module: descend into it for the next segment.
                        module = Some(ModuleOrUniformRoot::Module(next_module));
                        record_segment_res(self, res);
                    } else if res == Res::ToolMod && i + 1 != path.len() {
                        if binding.is_import() {
                            self.session
                                .struct_span_err(
                                    ident.span,
                                    "cannot use a tool module through an import",
                                )
                                .span_note(binding.span, "the tool module imported here")
                                .emit();
                        }
                        let res = Res::NonMacroAttr(NonMacroAttrKind::Tool);
                        return PathResult::NonModule(PartialRes::new(res));
                    } else if res == Res::Err {
                        return PathResult::NonModule(PartialRes::new(Res::Err));
                    } else if opt_ns.is_some() && (is_last || maybe_assoc) {
                        // A non-module item was found; the remaining segments (if any)
                        // are left unresolved for later (e.g. associated item) lookup.
                        self.lint_if_path_starts_with_module(
                            crate_lint,
                            path,
                            path_span,
                            second_binding,
                        );
                        return PathResult::NonModule(PartialRes::with_unresolved_segments(
                            res,
                            path.len() - i - 1,
                        ));
                    } else {
                        let label = format!(
                            "`{}` is {} {}, not a module",
                            ident,
                            res.article(),
                            res.descr(),
                        );
                        return PathResult::Failed {
                            span: ident.span,
                            label,
                            suggestion: None,
                            is_error_from_last_segment: is_last,
                        };
                    }
                }
                Err(Undetermined) => return PathResult::Indeterminate,
                Err(Determined) => {
                    // The segment definitively failed to resolve: build the most
                    // helpful label/suggestion pair for the failure position.
                    if let Some(ModuleOrUniformRoot::Module(module)) = module {
                        if opt_ns.is_some() && !module.is_normal() {
                            return PathResult::NonModule(PartialRes::with_unresolved_segments(
                                module.res().unwrap(),
                                path.len() - i,
                            ));
                        }
                    }
                    let module_res = match module {
                        Some(ModuleOrUniformRoot::Module(module)) => module.res(),
                        _ => None,
                    };
                    let (label, suggestion) = if module_res == self.graph_root.res() {
                        let is_mod = |res| matches!(res, Res::Def(DefKind::Mod, _));
                        // Don't look up import candidates if this is a speculative resolve
                        let mut candidates = if record_used {
                            self.lookup_import_candidates(ident, TypeNS, parent_scope, is_mod)
                        } else {
                            Vec::new()
                        };
                        candidates.sort_by_cached_key(|c| {
                            (c.path.segments.len(), pprust::path_to_string(&c.path))
                        });
                        if let Some(candidate) = candidates.get(0) {
                            (
                                String::from("unresolved import"),
                                Some((
                                    vec![(ident.span, pprust::path_to_string(&candidate.path))],
                                    String::from("a similar path exists"),
                                    Applicability::MaybeIncorrect,
                                )),
                            )
                        } else if self.session.edition() == Edition::Edition2015 {
                            (format!("maybe a missing crate `{}`?", ident), None)
                        } else {
                            (format!("could not find `{}` in the crate root", ident), None)
                        }
                    } else if i == 0 {
                        if ident
                            .name
                            .as_str()
                            .chars()
                            .next()
                            .map_or(false, |c| c.is_ascii_uppercase())
                        {
                            // Check whether the name refers to an item in the value namespace.
                            let suggestion = if ribs.is_some() {
                                let match_span = match self.resolve_ident_in_lexical_scope(
                                    ident,
                                    ValueNS,
                                    parent_scope,
                                    None,
                                    path_span,
                                    &ribs.unwrap()[ValueNS],
                                ) {
                                    // Name matches a local variable. For example:
                                    // ```
                                    // fn f() {
                                    //     let Foo: &str = "";
                                    //     println!("{}", Foo::Bar); // Name refers to local
                                    //                               // variable `Foo`.
                                    // }
                                    // ```
                                    Some(LexicalScopeBinding::Res(Res::Local(id))) => {
                                        Some(*self.pat_span_map.get(&id).unwrap())
                                    }
                                    // Name matches item from a local name binding
                                    // created by `use` declaration. For example:
                                    // ```
                                    // pub Foo: &str = "";
                                    //
                                    // mod submod {
                                    //     use super::Foo;
                                    //     println!("{}", Foo::Bar); // Name refers to local
                                    //                               // binding `Foo`.
                                    // }
                                    // ```
                                    Some(LexicalScopeBinding::Item(name_binding)) => {
                                        Some(name_binding.span)
                                    }
                                    _ => None,
                                };
                                if let Some(span) = match_span {
                                    Some((
                                        vec![(span, String::from(""))],
                                        format!("`{}` is defined here, but is not a type", ident),
                                        Applicability::MaybeIncorrect,
                                    ))
                                } else {
                                    None
                                }
                            } else {
                                None
                            };
                            (format!("use of undeclared type `{}`", ident), suggestion)
                        } else {
                            (
                                format!("use of undeclared crate or module `{}`", ident),
                                if ident.name == sym::alloc {
                                    Some((
                                        vec![],
                                        String::from(
                                            "add `extern crate alloc` to use the `alloc` crate",
                                        ),
                                        Applicability::MaybeIncorrect,
                                    ))
                                } else {
                                    self.find_similarly_named_module_or_crate(
                                        ident.name,
                                        &parent_scope.module,
                                    )
                                    .map(|sugg| {
                                        (
                                            vec![(ident.span, sugg.to_string())],
                                            String::from(
                                                "there is a crate or module with a similar name",
                                            ),
                                            Applicability::MaybeIncorrect,
                                        )
                                    })
                                },
                            )
                        }
                    } else {
                        let parent = path[i - 1].ident.name;
                        let parent = match parent {
                            // ::foo is mounted at the crate root for 2015, and is the extern
                            // prelude for 2018+
                            kw::PathRoot if self.session.edition() > Edition::Edition2015 => {
                                "the list of imported crates".to_owned()
                            }
                            kw::PathRoot | kw::Crate => "the crate root".to_owned(),
                            _ => {
                                format!("`{}`", parent)
                            }
                        };
                        let mut msg = format!("could not find `{}` in {}", ident, parent);
                        if ns == TypeNS || ns == ValueNS {
                            // See whether the name exists in the *other* namespace, so the
                            // error can say "expected type, found value `x`" etc.
                            let ns_to_try = if ns == TypeNS { ValueNS } else { TypeNS };
                            if let FindBindingResult::Binding(Ok(binding)) =
                                find_binding_in_ns(self, ns_to_try)
                            {
                                let mut found = |what| {
                                    msg = format!(
                                        "expected {}, found {} `{}` in {}",
                                        ns.descr(),
                                        what,
                                        ident,
                                        parent
                                    )
                                };
                                if binding.module().is_some() {
                                    found("module")
                                } else {
                                    match binding.res() {
                                        def::Res::<NodeId>::Def(kind, id) => found(kind.descr(id)),
                                        _ => found(ns_to_try.descr()),
                                    }
                                }
                            };
                        }
                        (msg, None)
                    };
                    return PathResult::Failed {
                        span: ident.span,
                        label,
                        suggestion,
                        is_error_from_last_segment: is_last,
                    };
                }
            }
        }
        self.lint_if_path_starts_with_module(crate_lint, path, path_span, second_binding);
        PathResult::Module(match module {
            Some(module) => module,
            None if path.is_empty() => ModuleOrUniformRoot::CurrentScope,
            _ => span_bug!(path_span, "resolve_path: non-empty path `{:?}` has no module", path),
        })
    }
    /// Buffers the `absolute_paths_not_starting_with_crate` lint for 2015-edition
    /// `use` paths that start with `::` but do not resolve to an extern crate or
    /// `crate::...`. No-ops for 2018+ spans (where this is a hard error).
    fn lint_if_path_starts_with_module(
        &mut self,
        crate_lint: CrateLint,
        path: &[Segment],
        path_span: Span,
        second_binding: Option<&NameBinding<'_>>,
    ) {
        // Decide which node/span the lint is attached to based on the caller's context.
        let (diag_id, diag_span) = match crate_lint {
            CrateLint::No => return,
            CrateLint::SimplePath(id) => (id, path_span),
            CrateLint::UsePath { root_id, root_span } => (root_id, root_span),
            CrateLint::QPathTrait { qpath_id, qpath_span } => (qpath_id, qpath_span),
        };
        let first_name = match path.get(0) {
            // In the 2018 edition this lint is a hard error, so nothing to do
            Some(seg) if seg.ident.span.rust_2015() && self.session.rust_2015() => seg.ident.name,
            _ => return,
        };
        // We're only interested in `use` paths which should start with
        // `{{root}}` currently.
        if first_name != kw::PathRoot {
            return;
        }
        match path.get(1) {
            // If this import looks like `crate::...` it's already good
            Some(Segment { ident, .. }) if ident.name == kw::Crate => return,
            // Otherwise go below to see if it's an extern crate
            Some(_) => {}
            // If the path has length one (and it's `PathRoot` most likely)
            // then we don't know whether we're gonna be importing a crate or an
            // item in our crate. Defer this lint to elsewhere
            None => return,
        }
        // If the first element of our path was actually resolved to an
        // `ExternCrate` (also used for `crate::...`) then no need to issue a
        // warning, this looks all good!
        if let Some(binding) = second_binding {
            if let NameBindingKind::Import { import, .. } = binding.kind {
                // Careful: we still want to rewrite paths from renamed extern crates.
                if let ImportKind::ExternCrate { source: None, .. } = import.kind {
                    return;
                }
            }
        }
        let diag = BuiltinLintDiagnostics::AbsPathWithModule(diag_span);
        self.lint_buffer.buffer_lint_with_diagnostic(
            lint::builtin::ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE,
            diag_id,
            diag_span,
            "absolute paths must start with `self`, `super`, \
            `crate`, or an external crate name in the 2018 edition",
            diag,
        );
    }
    /// Validate a local resolution (from ribs): checks that a `Res` found in rib
    /// `rib_index` may legally be used at the use-site, given all ribs crossed
    /// between definition and use (e.g. no upvars in fn items, no generic params
    /// escaping their item, no non-trivial anon-const uses of generics).
    /// Returns the (possibly adjusted) `Res`, or `Res::Err` after reporting.
    fn validate_res_from_ribs(
        &mut self,
        rib_index: usize,
        rib_ident: Ident,
        mut res: Res,
        record_used: bool,
        span: Span,
        original_rib_ident_def: Ident,
        all_ribs: &[Rib<'a>],
    ) -> Res {
        const CG_BUG_STR: &str = "min_const_generics resolve check didn't stop compilation";
        debug!("validate_res_from_ribs({:?})", res);
        // Only the ribs *between* the defining rib and the use-site matter below.
        let ribs = &all_ribs[rib_index + 1..];
        // An invalid forward use of a generic parameter from a previous default.
        if let ForwardGenericParamBanRibKind = all_ribs[rib_index].kind {
            if record_used {
                let res_error = if rib_ident.name == kw::SelfUpper {
                    ResolutionError::SelfInGenericParamDefault
                } else {
                    ResolutionError::ForwardDeclaredGenericParam
                };
                self.report_error(span, res_error);
            }
            assert_eq!(res, Res::Err);
            return Res::Err;
        }
        match res {
            Res::Local(_) => {
                use ResolutionError::*;
                let mut res_err = None;
                for rib in ribs {
                    match rib.kind {
                        NormalRibKind
                        | ClosureOrAsyncRibKind
                        | ModuleRibKind(..)
                        | MacroDefinition(..)
                        | ForwardGenericParamBanRibKind => {
                            // Nothing to do. Continue.
                        }
                        ItemRibKind(_) | FnItemRibKind | AssocItemRibKind => {
                            // This was an attempt to access an upvar inside a
                            // named function item. This is not allowed, so we
                            // report an error.
                            if record_used {
                                // We don't immediately trigger a resolve error, because
                                // we want certain other resolution errors (namely those
                                // emitted for `ConstantItemRibKind` below) to take
                                // precedence.
                                res_err = Some(CannotCaptureDynamicEnvironmentInFnItem);
                            }
                        }
                        ConstantItemRibKind(_, item) => {
                            // Still doesn't deal with upvars
                            if record_used {
                                let (span, resolution_error) =
                                    if let Some((ident, constant_item_kind)) = item {
                                        let kind_str = match constant_item_kind {
                                            ConstantItemKind::Const => "const",
                                            ConstantItemKind::Static => "static",
                                        };
                                        (
                                            span,
                                            AttemptToUseNonConstantValueInConstant(
                                                ident, "let", kind_str,
                                            ),
                                        )
                                    } else {
                                        (
                                            rib_ident.span,
                                            AttemptToUseNonConstantValueInConstant(
                                                original_rib_ident_def,
                                                "const",
                                                "let",
                                            ),
                                        )
                                    };
                                self.report_error(span, resolution_error);
                            }
                            return Res::Err;
                        }
                        ConstParamTyRibKind => {
                            if record_used {
                                self.report_error(span, ParamInTyOfConstParam(rib_ident.name));
                            }
                            return Res::Err;
                        }
                    }
                }
                if let Some(res_err) = res_err {
                    self.report_error(span, res_err);
                    return Res::Err;
                }
            }
            Res::Def(DefKind::TyParam, _) | Res::SelfTy(..) => {
                for rib in ribs {
                    let has_generic_params: HasGenericParams = match rib.kind {
                        NormalRibKind
                        | ClosureOrAsyncRibKind
                        | AssocItemRibKind
                        | ModuleRibKind(..)
                        | MacroDefinition(..)
                        | ForwardGenericParamBanRibKind => {
                            // Nothing to do. Continue.
                            continue;
                        }
                        ConstantItemRibKind(trivial, _) => {
                            let features = self.session.features_untracked();
                            // HACK(min_const_generics): We currently only allow `N` or `{ N }`.
                            if !(trivial || features.generic_const_exprs) {
                                // HACK(min_const_generics): If we encounter `Self` in an anonymous constant
                                // we can't easily tell if it's generic at this stage, so we instead remember
                                // this and then enforce the self type to be concrete later on.
                                if let Res::SelfTy(trait_def, Some((impl_def, _))) = res {
                                    res = Res::SelfTy(trait_def, Some((impl_def, true)));
                                } else {
                                    if record_used {
                                        self.report_error(
                                            span,
                                            ResolutionError::ParamInNonTrivialAnonConst {
                                                name: rib_ident.name,
                                                is_type: true,
                                            },
                                        );
                                    }
                                    self.session.delay_span_bug(span, CG_BUG_STR);
                                    return Res::Err;
                                }
                            }
                            continue;
                        }
                        // This was an attempt to use a type parameter outside its scope.
                        ItemRibKind(has_generic_params) => has_generic_params,
                        FnItemRibKind => HasGenericParams::Yes,
                        ConstParamTyRibKind => {
                            if record_used {
                                self.report_error(
                                    span,
                                    ResolutionError::ParamInTyOfConstParam(rib_ident.name),
                                );
                            }
                            return Res::Err;
                        }
                    };
                    if record_used {
                        self.report_error(
                            span,
                            ResolutionError::GenericParamsFromOuterFunction(
                                res,
                                has_generic_params,
                            ),
                        );
                    }
                    return Res::Err;
                }
            }
            Res::Def(DefKind::ConstParam, _) => {
                let mut ribs = ribs.iter().peekable();
                if let Some(Rib { kind: FnItemRibKind, .. }) = ribs.peek() {
                    // When declaring const parameters inside function signatures, the first rib
                    // is always a `FnItemRibKind`. In this case, we can skip it, to avoid it
                    // (spuriously) conflicting with the const param.
                    ribs.next();
                }
                for rib in ribs {
                    let has_generic_params = match rib.kind {
                        NormalRibKind
                        | ClosureOrAsyncRibKind
                        | AssocItemRibKind
                        | ModuleRibKind(..)
                        | MacroDefinition(..)
                        | ForwardGenericParamBanRibKind => continue,
                        ConstantItemRibKind(trivial, _) => {
                            let features = self.session.features_untracked();
                            // HACK(min_const_generics): We currently only allow `N` or `{ N }`.
                            if !(trivial || features.generic_const_exprs) {
                                if record_used {
                                    self.report_error(
                                        span,
                                        ResolutionError::ParamInNonTrivialAnonConst {
                                            name: rib_ident.name,
                                            is_type: false,
                                        },
                                    );
                                }
                                self.session.delay_span_bug(span, CG_BUG_STR);
                                return Res::Err;
                            }
                            continue;
                        }
                        ItemRibKind(has_generic_params) => has_generic_params,
                        FnItemRibKind => HasGenericParams::Yes,
                        ConstParamTyRibKind => {
                            if record_used {
                                self.report_error(
                                    span,
                                    ResolutionError::ParamInTyOfConstParam(rib_ident.name),
                                );
                            }
                            return Res::Err;
                        }
                    };
                    // This was an attempt to use a const parameter outside its scope.
                    if record_used {
                        self.report_error(
                            span,
                            ResolutionError::GenericParamsFromOuterFunction(
                                res,
                                has_generic_params,
                            ),
                        );
                    }
                    return Res::Err;
                }
            }
            _ => {}
        }
        res
    }
fn record_partial_res(&mut self, node_id: NodeId, resolution: PartialRes) {
debug!("(recording res) recording {:?} for {}", resolution, node_id);
if let Some(prev_res) = self.partial_res_map.insert(node_id, resolution) {
panic!("path resolved multiple times ({:?} before, {:?} now)", prev_res, resolution);
}
}
fn record_pat_span(&mut self, node: NodeId, span: Span) {
debug!("(recording pat) recording {:?} for {:?}", node, span);
self.pat_span_map.insert(node, span);
}
fn is_accessible_from(&self, vis: ty::Visibility, module: Module<'a>) -> bool {
vis.is_accessible_from(module.nearest_parent_mod(), self)
}
fn set_binding_parent_module(&mut self, binding: &'a NameBinding<'a>, module: Module<'a>) {
if let Some(old_module) = self.binding_parent_modules.insert(PtrKey(binding), module) {
if !ptr::eq(module, old_module) {
span_bug!(binding.span, "parent module is reset for binding");
}
}
}
fn disambiguate_macro_rules_vs_modularized(
&self,
macro_rules: &'a NameBinding<'a>,
modularized: &'a NameBinding<'a>,
) -> bool {
// Some non-controversial subset of ambiguities "modularized macro name" vs "macro_rules"
// is disambiguated to mitigate regressions from macro modularization.
// Scoping for `macro_rules` behaves like scoping for `let` at module level, in general.
match (
self.binding_parent_modules.get(&PtrKey(macro_rules)),
self.binding_parent_modules.get(&PtrKey(modularized)),
) {
(Some(macro_rules), Some(modularized)) => {
macro_rules.nearest_parent_mod() == modularized.nearest_parent_mod()
&& modularized.is_ancestor_of(macro_rules)
}
_ => false,
}
}
    /// Emits all resolution errors collected during resolution: use-injection
    /// suggestions, macro-export path lints, ambiguity errors, and (deduplicated)
    /// privacy errors.
    fn report_errors(&mut self, krate: &Crate) {
        self.report_with_use_injections(krate);
        for &(span_use, span_def) in &self.macro_expanded_macro_export_errors {
            let msg = "macro-expanded `macro_export` macros from the current crate \
                       cannot be referred to by absolute paths";
            self.lint_buffer.buffer_lint_with_diagnostic(
                lint::builtin::MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS,
                CRATE_NODE_ID,
                span_use,
                msg,
                BuiltinLintDiagnostics::MacroExpandedMacroExportsAccessedByAbsolutePaths(span_def),
            );
        }
        for ambiguity_error in &self.ambiguity_errors {
            self.report_ambiguity_error(ambiguity_error);
        }
        // Privacy errors can repeat for the same span; report each span only once.
        let mut reported_spans = FxHashSet::default();
        for error in &self.privacy_errors {
            if reported_spans.insert(error.dedup_span) {
                self.report_privacy_error(error);
            }
        }
    }
    /// Emits the queued `UseError` diagnostics, attaching `use` suggestions
    /// (candidate import paths and a placement span within the crate) when available.
    fn report_with_use_injections(&mut self, krate: &Crate) {
        for UseError { mut err, candidates, def_id, instead, suggestion } in
            self.use_injections.drain(..)
        {
            // Find where in the enclosing item a `use` could be inserted, if the
            // definition is local to this crate.
            let (span, found_use) = if let Some(def_id) = def_id.as_local() {
                UsePlacementFinder::check(krate, self.def_id_to_node_id[def_id])
            } else {
                (None, false)
            };
            if !candidates.is_empty() {
                diagnostics::show_candidates(
                    &self.definitions,
                    self.session,
                    &mut err,
                    span,
                    &candidates,
                    instead,
                    found_use,
                );
            } else if let Some((span, msg, sugg, appl)) = suggestion {
                err.span_suggestion(span, msg, sugg, appl);
            }
            err.emit();
        }
    }
    /// Reports a duplicate-name conflict between `old_binding` and `new_binding`
    /// for `ident` in namespace `ns` of `parent`, choosing the appropriate error
    /// code and, where possible, suggesting removal or rename of the offending
    /// import.
    fn report_conflict<'b>(
        &mut self,
        parent: Module<'_>,
        ident: Ident,
        ns: Namespace,
        new_binding: &NameBinding<'b>,
        old_binding: &NameBinding<'b>,
    ) {
        // Error on the second of two conflicting names
        if old_binding.span.lo() > new_binding.span.lo() {
            return self.report_conflict(parent, ident, ns, old_binding, new_binding);
        }
        let container = match parent.kind {
            ModuleKind::Def(kind, _, _) => kind.descr(parent.def_id()),
            ModuleKind::Block(..) => "block",
        };
        let old_noun = match old_binding.is_import() {
            true => "import",
            false => "definition",
        };
        let new_participle = match new_binding.is_import() {
            true => "imported",
            false => "defined",
        };
        let (name, span) =
            (ident.name, self.session.source_map().guess_head_span(new_binding.span));
        // Avoid reporting the same name/span pair more than once.
        if let Some(s) = self.name_already_seen.get(&name) {
            if s == &span {
                return;
            }
        }
        let old_kind = match (ns, old_binding.module()) {
            (ValueNS, _) => "value",
            (MacroNS, _) => "macro",
            (TypeNS, _) if old_binding.is_extern_crate() => "extern crate",
            (TypeNS, Some(module)) if module.is_normal() => "module",
            (TypeNS, Some(module)) if module.is_trait() => "trait",
            (TypeNS, _) => "type",
        };
        let msg = format!("the name `{}` is defined multiple times", name);
        // Select the error code based on whether the participants are extern
        // crates and/or imports.
        let mut err = match (old_binding.is_extern_crate(), new_binding.is_extern_crate()) {
            (true, true) => struct_span_err!(self.session, span, E0259, "{}", msg),
            (true, _) | (_, true) => match new_binding.is_import() && old_binding.is_import() {
                true => struct_span_err!(self.session, span, E0254, "{}", msg),
                false => struct_span_err!(self.session, span, E0260, "{}", msg),
            },
            _ => match (old_binding.is_import(), new_binding.is_import()) {
                (false, false) => struct_span_err!(self.session, span, E0428, "{}", msg),
                (true, true) => struct_span_err!(self.session, span, E0252, "{}", msg),
                _ => struct_span_err!(self.session, span, E0255, "{}", msg),
            },
        };
        err.note(&format!(
            "`{}` must be defined only once in the {} namespace of this {}",
            name,
            ns.descr(),
            container
        ));
        err.span_label(span, format!("`{}` re{} here", name, new_participle));
        err.span_label(
            self.session.source_map().guess_head_span(old_binding.span),
            format!("previous {} of the {} `{}` here", old_noun, old_kind, name),
        );
        // See https://github.com/rust-lang/rust/issues/32354
        use NameBindingKind::Import;
        let import = match (&new_binding.kind, &old_binding.kind) {
            // If there are two imports where one or both have attributes then prefer removing the
            // import without attributes.
            (Import { import: new, .. }, Import { import: old, .. })
                if {
                    !new_binding.span.is_dummy()
                        && !old_binding.span.is_dummy()
                        && (new.has_attributes || old.has_attributes)
                } =>
            {
                if old.has_attributes {
                    Some((new, new_binding.span, true))
                } else {
                    Some((old, old_binding.span, true))
                }
            }
            // Otherwise prioritize the new binding.
            (Import { import, .. }, other) if !new_binding.span.is_dummy() => {
                Some((import, new_binding.span, other.is_import()))
            }
            (other, Import { import, .. }) if !old_binding.span.is_dummy() => {
                Some((import, old_binding.span, other.is_import()))
            }
            _ => None,
        };
        // Check if the target of the use for both bindings is the same.
        let duplicate = new_binding.res().opt_def_id() == old_binding.res().opt_def_id();
        let has_dummy_span = new_binding.span.is_dummy() || old_binding.span.is_dummy();
        let from_item =
            self.extern_prelude.get(&ident).map_or(true, |entry| entry.introduced_by_item);
        // Only suggest removing an import if both bindings are to the same def, if both spans
        // aren't dummy spans. Further, if both bindings are imports, then the ident must have
        // been introduced by an item.
        let should_remove_import = duplicate
            && !has_dummy_span
            && ((new_binding.is_extern_crate() || old_binding.is_extern_crate()) || from_item);
        match import {
            Some((import, span, true)) if should_remove_import && import.is_nested() => {
                self.add_suggestion_for_duplicate_nested_use(&mut err, import, span)
            }
            Some((import, _, true)) if should_remove_import && !import.is_glob() => {
                // Simple case - remove the entire import. Due to the above match arm, this can
                // only be a single use so just remove it entirely.
                err.tool_only_span_suggestion(
                    import.use_span_with_attributes,
                    "remove unnecessary import",
                    String::new(),
                    Applicability::MaybeIncorrect,
                );
            }
            Some((import, span, _)) => {
                self.add_suggestion_for_rename_of_use(&mut err, name, import, span)
            }
            _ => {}
        }
        err.emit();
        self.name_already_seen.insert(name, span);
    }
/// This function adds a suggestion to change the binding name of a new import that conflicts
/// with an existing import.
///
/// ```text,ignore (diagnostic)
/// help: you can use `as` to change the binding name of the import
/// |
/// LL | use foo::bar as other_bar;
/// | ^^^^^^^^^^^^^^^^^^^^^
/// ```
fn add_suggestion_for_rename_of_use(
    &self,
    err: &mut DiagnosticBuilder<'_>,
    name: Symbol,
    import: &Import<'_>,
    binding_span: Span,
) {
    // Propose a replacement name that follows the casing of the original:
    // `Foo` -> `OtherFoo`, `foo` -> `other_foo`.
    let suggested_name = if name.as_str().chars().next().unwrap().is_uppercase() {
        format!("Other{}", name)
    } else {
        format!("other_{}", name)
    };

    let mut suggestion = None;
    match import.kind {
        // `use module::{self}` can only be rebound via `self as <name>`.
        ImportKind::Single { type_ns_only: true, .. } => {
            suggestion = Some(format!("self as {}", suggested_name))
        }
        ImportKind::Single { source, .. } => {
            // Byte offset of the end of the source path, measured from the
            // start of the binding span, so `as <name>` can be spliced in
            // right after the path.
            if let Some(pos) =
                source.span.hi().0.checked_sub(binding_span.lo().0).map(|pos| pos as usize)
            {
                if let Ok(snippet) = self.session.source_map().span_to_snippet(binding_span) {
                    // Defensive bound check before slicing the snippet.
                    if pos <= snippet.len() {
                        suggestion = Some(format!(
                            "{} as {}{}",
                            &snippet[..pos],
                            suggested_name,
                            // Preserve a trailing semicolon if the snippet had one.
                            if snippet.ends_with(';') { ";" } else { "" }
                        ))
                    }
                }
            }
        }
        ImportKind::ExternCrate { source, target, .. } => {
            suggestion = Some(format!(
                "extern crate {} as {};",
                source.unwrap_or(target.name),
                suggested_name,
            ))
        }
        // Other import kinds are never passed to this function.
        _ => unreachable!(),
    }

    let rename_msg = "you can use `as` to change the binding name of the import";
    if let Some(suggestion) = suggestion {
        err.span_suggestion(
            binding_span,
            rename_msg,
            suggestion,
            Applicability::MaybeIncorrect,
        );
    } else {
        // No concrete rewrite could be built; attach a plain label instead.
        err.span_label(binding_span, rename_msg);
    }
}
/// This function adds a suggestion to remove an unnecessary binding from an import that is
/// nested. In the following example, this function will be invoked to remove the `a` binding
/// in the second use statement:
///
/// ```ignore (diagnostic)
/// use issue_52891::a;
/// use issue_52891::{d, a, e};
/// ```
///
/// The following suggestion will be added:
///
/// ```ignore (diagnostic)
/// use issue_52891::{d, a, e};
/// ^-- help: remove unnecessary import
/// ```
///
/// If the nested use contains only one import then the suggestion will remove the entire
/// line.
///
/// It is expected that the provided import is nested - this isn't checked by the
/// function. If this invariant is not upheld, this function's behaviour will be unexpected
/// as characters expected by span manipulations won't be present.
fn add_suggestion_for_duplicate_nested_use(
    &self,
    err: &mut DiagnosticBuilder<'_>,
    import: &Import<'_>,
    binding_span: Span,
) {
    assert!(import.is_nested());
    let message = "remove unnecessary import";

    // Two examples will be used to illustrate the span manipulations we're doing:
    //
    // - Given `use issue_52891::{d, a, e};` where `a` is a duplicate then `binding_span` is
    //   `a` and `import.use_span` is `issue_52891::{d, a, e};`.
    // - Given `use issue_52891::{d, e, a};` where `a` is a duplicate then `binding_span` is
    //   `a` and `import.use_span` is `issue_52891::{d, e, a};`.
    let (found_closing_brace, span) =
        find_span_of_binding_until_next_binding(self.session, binding_span, import.use_span);

    // If there was a closing brace then identify the span to remove any trailing commas from
    // previous imports.
    if found_closing_brace {
        if let Some(span) = extend_span_to_previous_binding(self.session, span) {
            // Suggesting an empty string for the span removes its text.
            err.tool_only_span_suggestion(
                span,
                message,
                String::new(),
                Applicability::MaybeIncorrect,
            );
        } else {
            // Remove the entire line if we cannot extend the span back, this indicates an
            // `issue_52891::{self}` case.
            err.span_suggestion(
                import.use_span_with_attributes,
                message,
                String::new(),
                Applicability::MaybeIncorrect,
            );
        }

        return;
    }

    err.span_suggestion(span, message, String::new(), Applicability::MachineApplicable);
}
/// Looks up `ident` in the extern prelude, materializing a binding for the
/// crate root if the entry has no explicit `extern crate` item. When
/// `speculative` is set, no uses are recorded.
fn extern_prelude_get(
    &mut self,
    ident: Ident,
    speculative: bool,
) -> Option<&'a NameBinding<'a>> {
    if ident.is_path_segment_keyword() {
        // Make sure `self`, `super` etc produce an error when passed to here.
        return None;
    }
    self.extern_prelude.get(&ident.normalize_to_macros_2_0()).cloned().and_then(|entry| {
        if let Some(binding) = entry.extern_crate_item {
            // Prefer the explicit `extern crate` item's binding when present.
            if !speculative && entry.introduced_by_item {
                self.record_use(ident, binding, false);
            }
            Some(binding)
        } else {
            let crate_id = if !speculative {
                // On a failed crate load, fall back to the dummy binding so
                // resolution can keep going.
                let Some(crate_id) =
                    self.crate_loader.process_path_extern(ident.name, ident.span) else { return Some(self.dummy_binding); };
                crate_id
            } else {
                self.crate_loader.maybe_process_path_extern(ident.name)?
            };
            // Synthesize a public binding pointing at the loaded crate's root.
            let crate_root = self.expect_module(crate_id.as_def_id());
            Some(
                (crate_root, ty::Visibility::Public, DUMMY_SP, LocalExpnId::ROOT)
                    .to_name_binding(self.arenas),
            )
        }
    })
}
/// Rustdoc uses this to resolve things in a recoverable way. `ResolutionError<'a>`
/// isn't something that can be returned because it can't be made to live that long,
/// and also it's a private type. Fortunately rustdoc doesn't need to know the error,
/// just that an error occurred.
// FIXME(Manishearth): intra-doc links won't get warned of epoch changes.
pub fn resolve_str_path_error(
&mut self,
span: Span,
path_str: &str,
ns: Namespace,
module_id: DefId,
) -> Result<(ast::Path, Res), ()> {
let path = if path_str.starts_with("::") {
ast::Path {
span,
segments: iter::once(Ident::with_dummy_span(kw::PathRoot))
.chain(path_str.split("::").skip(1).map(Ident::from_str))
.map(|i| self.new_ast_path_segment(i))
.collect(),
tokens: None,
}
} else {
ast::Path {
span,
segments: path_str
.split("::")
.map(Ident::from_str)
.map(|i| self.new_ast_path_segment(i))
.collect(),
tokens: None,
}
};
let module = self.expect_module(module_id);
let parent_scope = &ParentScope::module(module, self);
let res = self.resolve_ast_path(&path, ns, parent_scope).map_err(|_| ())?;
Ok((path, res))
}
/// Resolves a path passed from rustdoc or HIR lowering in namespace `ns`.
fn resolve_ast_path(
    &mut self,
    path: &ast::Path,
    ns: Namespace,
    parent_scope: &ParentScope<'a>,
) -> Result<Res, (Span, ResolutionError<'a>)> {
    match self.resolve_path(
        &Segment::from_path(path),
        Some(ns),
        parent_scope,
        false,
        path.span,
        CrateLint::No,
    ) {
        PathResult::Module(ModuleOrUniformRoot::Module(module)) => Ok(module.res().unwrap()),
        // Fully resolved non-module path.
        PathResult::NonModule(path_res) if path_res.unresolved_segments() == 0 => {
            Ok(path_res.base_res())
        }
        // Segments are left over: a type-relative path, which cannot be
        // resolved without type information.
        PathResult::NonModule(..) => Err((
            path.span,
            ResolutionError::FailedToResolve {
                label: String::from("type-relative paths are not supported in this context"),
                suggestion: None,
            },
        )),
        // A plain module result was handled above; the rest is impossible here.
        PathResult::Module(..) | PathResult::Indeterminate => unreachable!(),
        PathResult::Failed { span, label, suggestion, .. } => {
            Err((span, ResolutionError::FailedToResolve { label, suggestion }))
        }
    }
}
/// Builds a path segment for `ident` with a freshly allocated `NodeId`, so
/// later passes can record resolutions against it.
fn new_ast_path_segment(&mut self, ident: Ident) -> ast::PathSegment {
    let mut segment = ast::PathSegment::from_ident(ident);
    segment.id = self.next_node_id();
    segment
}
/// Returns the crate's root module (used by rustdoc).
pub fn graph_root(&self) -> Module<'a> {
    self.graph_root
}
/// Returns the map of all known macros by name (used by rustdoc).
pub fn all_macros(&self) -> &FxHashMap<Symbol, Res> {
    &self.all_macros
}
/// For rustdoc.
/// For local modules returns only reexports, for external modules returns all children.
pub fn module_children_or_reexports(&self, def_id: DefId) -> Vec<ModChild> {
    match def_id.as_local() {
        Some(local_def_id) => {
            self.reexport_map.get(&local_def_id).cloned().unwrap_or_default()
        }
        None => self.cstore().module_children_untracked(def_id, self.session),
    }
}
/// Retrieves the span of the given `DefId` if `DefId` is in the local crate;
/// returns `None` for definitions from other crates.
#[inline]
pub fn opt_span(&self, def_id: DefId) -> Option<Span> {
    def_id.as_local().map(|def_id| self.definitions.def_span(def_id))
}
/// Checks if an expression refers to a function marked with
/// `#[rustc_legacy_const_generics]` and returns the argument index list
/// from the attribute.
pub fn legacy_const_generic_args(&mut self, expr: &Expr) -> Option<Vec<usize>> {
    if let ExprKind::Path(None, path) = &expr.kind {
        // Don't perform legacy const generics rewriting if the path already
        // has generic arguments.
        if path.segments.last().unwrap().args.is_some() {
            return None;
        }

        // Only fully resolved paths can name a function.
        let partial_res = self.partial_res_map.get(&expr.id)?;
        if partial_res.unresolved_segments() != 0 {
            return None;
        }

        if let Res::Def(def::DefKind::Fn, def_id) = partial_res.base_res() {
            // We only support cross-crate argument rewriting. Uses
            // within the same crate should be updated to use the new
            // const generics style.
            if def_id.is_local() {
                return None;
            }

            // Fast path: answer already cached from an earlier call.
            if let Some(v) = self.legacy_const_generic_args.get(&def_id) {
                return v.clone();
            }

            let attr = self
                .cstore()
                .item_attrs_untracked(def_id, self.session)
                .find(|a| a.has_name(sym::rustc_legacy_const_generics))?;
            let mut ret = Vec::new();
            for meta in attr.meta_item_list()? {
                match meta.literal()?.kind {
                    LitKind::Int(a, _) => ret.push(a as usize),
                    _ => panic!("invalid arg index"),
                }
            }
            // Cache the lookup to avoid parsing attributes for an item multiple times.
            self.legacy_const_generic_args.insert(def_id, Some(ret.clone()));
            return Some(ret);
        }
    }
    None
}
/// Resolves `main` in the crate root's value namespace and records the
/// result in `self.main_def`.
fn resolve_main(&mut self) {
    let module = self.graph_root;
    let ident = Ident::with_dummy_span(sym::main);
    let parent_scope = &ParentScope::module(module, self);

    let name_binding = match self.resolve_ident_in_module(
        ModuleOrUniformRoot::Module(module),
        ident,
        ValueNS,
        parent_scope,
        false,
        DUMMY_SP,
    ) {
        Ok(name_binding) => name_binding,
        // No `main` in the root module: leave `main_def` unset.
        _ => return,
    };

    let res = name_binding.res();
    let is_import = name_binding.is_import();
    let span = name_binding.span;
    // Only a function counts as a use of the binding.
    if let Res::Def(DefKind::Fn, _) = res {
        self.record_use(ident, name_binding, false);
    }
    self.main_def = Some(MainDefinition { res, is_import, span });
}
}
/// Renders a sequence of path segment names as `a::b::c`, skipping the path
/// root and prefixing raw identifiers with `r#`.
fn names_to_string(names: &[Symbol]) -> String {
    let mut result = String::new();
    let mut first = true;
    for name in names.iter().filter(|name| **name != kw::PathRoot) {
        if !first {
            result.push_str("::");
        }
        first = false;
        if Ident::with_dummy_span(*name).is_raw_guess() {
            result.push_str("r#");
        }
        result.push_str(name.as_str());
    }
    result
}
/// Renders an AST path as a `::`-separated string of its segment names.
fn path_names_to_string(path: &Path) -> String {
    let segment_names: Vec<_> = path.segments.iter().map(|seg| seg.ident.name).collect();
    names_to_string(&segment_names)
}
/// A somewhat inefficient routine to obtain the name of a module.
/// Returns `None` for the crate root itself.
fn module_to_string(module: Module<'_>) -> Option<String> {
    // Walk the parent chain iteratively, collecting names innermost-first.
    let mut names = Vec::new();
    let mut current = module;
    loop {
        if let ModuleKind::Def(.., name) = current.kind {
            match current.parent {
                Some(parent) => {
                    names.push(name);
                    current = parent;
                }
                // The crate root contributes no name and ends the walk.
                None => break,
            }
        } else {
            // Block modules have no name of their own.
            names.push(Symbol::intern("<opaque>"));
            current = current.parent.unwrap();
        }
    }
    if names.is_empty() {
        return None;
    }
    names.reverse();
    Some(names_to_string(&names))
}
#[derive(Copy, Clone, Debug)]
enum CrateLint {
    /// Do not issue the lint.
    No,

    /// This lint applies to some arbitrary path; e.g., `impl ::foo::Bar`.
    /// In this case, we can take the span of that path.
    SimplePath(NodeId),

    /// This lint comes from a `use` statement. In this case, what we
    /// care about really is the *root* `use` statement; e.g., if we
    /// have nested things like `use a::{b, c}`, we care about the
    /// `use a` part.
    UsePath { root_id: NodeId, root_span: Span },

    /// This is the "trait item" from a fully qualified path. For example,
    /// we might be resolving `X::Y::Z` from a path like `<T as X::Y>::Z`.
    /// The `path_span` is the span of the trait itself (`X::Y`).
    QPathTrait { qpath_id: NodeId, qpath_span: Span },
}
impl CrateLint {
    /// Returns the `NodeId` the lint should be attached to, if any.
    fn node_id(&self) -> Option<NodeId> {
        match *self {
            CrateLint::No => None,
            CrateLint::SimplePath(id) => Some(id),
            CrateLint::UsePath { root_id, .. } => Some(root_id),
            CrateLint::QPathTrait { qpath_id, .. } => Some(qpath_id),
        }
    }
}
/// Installs the query providers implemented by this crate.
pub fn provide(providers: &mut Providers) {
    late::lifetimes::provide(providers);
}
| 40.092484 | 141 | 0.533615 |
14f6077be7a2cfb54aa389cd4d3a0318ee43a0a5 | 557 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
    // `box 100` relied on the unstable (since removed) placement-box syntax;
    // `Box::new` is the stable equivalent and heap-allocates the value.
    let i = Box::new(100);
    // Deliberately deferred initialization: `j` is initialized by moving `i`.
    let mut j;
    j = i;
    assert_eq!(*j, 100);
}
| 32.764706 | 68 | 0.698384 |
d903aead331c430f1256cf278cf584236796a487 | 434 | // Copyright (c) 2020 Xu Shaohua <[email protected]>. All rights reserved.
// Use of this source is governed by Apache-2.0 License that can be found
// in the LICENSE file.
mod fcntl;
mod ioctls;
mod page;
mod posix_types;
mod ptrace;
mod signal;
mod stat;
mod uapi_signal;
pub use fcntl::*;
pub use ioctls::*;
pub use page::*;
pub use posix_types::*;
pub use ptrace::*;
pub use signal::*;
pub use stat::*;
pub use uapi_signal::*;
| 19.727273 | 75 | 0.705069 |
d9ef9447941d710475795a1897aaa75307347322 | 631 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use gdk;
use gdk_wayland_sys;
use glib::translate::*;
use std::fmt;
use WaylandSurface;
// Generated glib wrapper: declares `WaylandPopup` around the C object type
// `GdkWaylandPopup`, extending `WaylandSurface`/`gdk::Surface` and
// implementing `gdk::Popup`, with its GType getter.
glib_wrapper! {
    pub struct WaylandPopup(Object<gdk_wayland_sys::GdkWaylandPopup>) @extends WaylandSurface, gdk::Surface, @implements gdk::Popup;

    match fn {
        get_type => || gdk_wayland_sys::gdk_wayland_popup_get_type(),
    }
}
// Placeholder for inherent methods on `WaylandPopup` (none generated).
impl WaylandPopup {}
impl fmt::Display for WaylandPopup {
    /// Renders the fixed type name; no per-instance state is shown.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("WaylandPopup")
    }
}
| 24.269231 | 132 | 0.684628 |
8f921708cb503f5d217293fc500428cd81d31cb5 | 6,568 | // Copyright 2018 Grove Enterprises LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate clap;
extern crate datafusion;
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::rc::Rc;
use std::str;
use std::time::Instant;
use clap::{App, Arg};
use datafusion::exec::*;
use datafusion::functions::geospatial::st_astext::*;
use datafusion::functions::geospatial::st_point::*;
use datafusion::functions::math::*;
use datafusion::sqlast::ASTNode::SQLCreateTable;
use datafusion::sqlparser::*;
mod linereader;
/// Crate version injected at compile time from `Cargo.toml`.
const VERSION: &'static str = env!("CARGO_PKG_VERSION");
#[cfg(target_family = "unix")]
/// Runs the console either in batch mode (a SQL script supplied via
/// `--script`) or as an interactive REPL reading statements from stdin.
fn setup_console(cmdline: clap::ArgMatches) {
    //parse args
    //let etcd_endpoints = cmdline.value_of("ETCD").unwrap();

    let mut console = Console::new(/*etcd_endpoints.to_string()*/);

    match cmdline.value_of("SCRIPT") {
        Some(filename) => match File::open(filename) {
            Ok(f) => {
                // Statements may span several lines; accumulate until a line
                // ends with `;`, then execute the statement without that
                // trailing semicolon.
                // NOTE(review): lines are concatenated with no separator, so a
                // statement broken mid-token across lines would be glued
                // together — confirm scripts are written accordingly.
                let mut cmd_buffer = String::new();
                let reader = BufReader::new(&f);
                for line in reader.lines() {
                    match line {
                        Ok(cmd) => {
                            cmd_buffer.push_str(&cmd);
                            if cmd_buffer.ends_with(';') {
                                // Strip only the trailing `;`. The previous
                                // `len() - 2` slice also dropped the
                                // statement's last character, and panicked on
                                // a lone `";"` line.
                                console.execute(&cmd_buffer[..cmd_buffer.len() - 1]);
                                cmd_buffer.clear();
                            }
                        }
                        Err(e) => println!("Error: {}", e),
                    }
                }
                // Execute a final statement that ends exactly at EOF.
                if cmd_buffer.ends_with(';') {
                    console.execute(&cmd_buffer[..cmd_buffer.len() - 1]);
                }
            }
            Err(e) => println!("Could not open file {}: {}", filename, e),
        },
        _ => {
            // Interactive mode: keep reading until the reader signals exit.
            let mut reader = linereader::LineReader::new();
            loop {
                match reader.read_lines() {
                    Some(linereader::LineResult::Break) => break,
                    Some(linereader::LineResult::Input(command)) => console.execute(&command),
                    None => (),
                }
            }
        }
    }
}
#[cfg(target_family = "windows")]
/// Windows stub: the console is only implemented for Unix targets (see the
/// `cfg(target_family = "unix")` variant above), so fail loudly here.
fn setup_console(cmdline: clap::ArgMatches) {
    panic!("Console is not supported on windows!")
}
/// Entry point: prints a banner, parses command-line arguments with clap and
/// hands off to the platform-specific console loop.
fn main() {
    println!("DataFusion Console");
    // println!("");
    // println!("Enter SQL statements terminated with semicolon, or 'quit' to leave.");
    // println!("");

    let cmdline = App::new("DataFusion Console")
        .version(VERSION)
        // .arg(
        //     Arg::with_name("ETCD")
        //         .help("etcd endpoints")
        //         .short("e")
        //         .long("etcd")
        //         .value_name("URL")
        //         .required(true)
        //         .takes_value(true),
        // )
        .arg(
            Arg::with_name("SCRIPT")
                .help("SQL script to run")
                .short("s")
                .long("script")
                .required(false)
                .takes_value(true),
        )
        .get_matches();

    setup_console(cmdline);
}
/// Interactive SQL console
struct Console {
    // Execution context used to parse, plan and run SQL statements.
    ctx: ExecutionContext,
}
impl Console {
    /// Create a new instance of the console with geospatial and math scalar
    /// functions pre-registered.
    fn new() -> Self {
        let mut ctx = ExecutionContext::local();
        ctx.register_scalar_function(Rc::new(STPointFunc {}));
        ctx.register_scalar_function(Rc::new(STAsText {}));
        ctx.register_scalar_function(Rc::new(SqrtFunction {}));
        Console { ctx }
    }

    /// Execute a SQL statement or console command, printing either the
    /// result summary (with elapsed wall-clock time) or an error.
    fn execute(&mut self, sql: &str) {
        println!("Executing query ...");

        let timer = Instant::now();

        // parse the SQL
        match Parser::parse_sql(String::from(sql)) {
            Ok(ast) => match ast {
                SQLCreateTable { .. } => {
                    // Previously this `unwrap()`ed, crashing the whole console
                    // on an invalid DDL statement; report the error like the
                    // other paths instead.
                    if let Err(e) = self.ctx.sql(&sql) {
                        println!("Error: {:?}", e);
                    }
                }
                _ => match self.ctx.create_logical_plan(sql) {
                    Ok(logical_plan) => {
                        let physical_plan = PhysicalPlan::Interactive {
                            plan: logical_plan.clone(),
                        };
                        match self.ctx.execute(&physical_plan) {
                            Ok(result) => {
                                let elapsed = timer.elapsed();
                                let elapsed_seconds = elapsed.as_secs() as f64
                                    + elapsed.subsec_nanos() as f64 / 1000000000.0;
                                match result {
                                    // `Unit` and `Str` results print the same
                                    // summary line.
                                    ExecutionResult::Unit | ExecutionResult::Str(_) => {
                                        println!("Query executed in {} seconds", elapsed_seconds);
                                    }
                                    ExecutionResult::Count(n) => {
                                        println!(
                                            "Query executed in {} seconds and updated {} rows",
                                            elapsed_seconds, n
                                        );
                                    }
                                }
                            }
                            Err(e) => println!("Error: {:?}", e),
                        }
                    }
                    Err(e) => println!("Error: {:?}", e),
                },
            },
            Err(e) => println!("Error: {:?}", e),
        }
    }
}
| 35.695652 | 98 | 0.456303 |
1abbdac48d2e6890cbbf0c7b718b40d442617fe5 | 1,682 |
/// Yew component rendering the Material Design "directions boat" SVG icon.
pub struct IconDirectionsBoat {
    // Styling props (size, colors, stroke settings) supplied by the parent.
    props: crate::Props,
}
impl yew::Component for IconDirectionsBoat {
    type Properties = crate::Props;
    type Message = ();

    // Stateless icon: just capture the incoming props.
    fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
    {
        Self { props }
    }

    // No messages are ever dispatched; re-render if one somehow arrives.
    fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
    {
        true
    }

    // NOTE(review): returning `false` and discarding the new props means prop
    // changes never trigger a re-render — confirm this is intended.
    fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
    {
        false
    }

    // Render an outer <svg> styled from the props (with defaults) wrapping
    // the fixed icon path data.
    fn view(&self) -> yew::prelude::Html
    {
        yew::prelude::html! {
            <svg
                class=self.props.class.unwrap_or("")
                width=self.props.size.unwrap_or(24).to_string()
                height=self.props.size.unwrap_or(24).to_string()
                viewBox="0 0 24 24"
                fill=self.props.fill.unwrap_or("none")
                stroke=self.props.color.unwrap_or("currentColor")
                stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
                stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
                stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
            >
                <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M20 21c-1.39 0-2.78-.47-4-1.32-2.44 1.71-5.56 1.71-8 0C6.78 20.53 5.39 21 4 21H2v2h2c1.38 0 2.74-.35 4-.99 2.52 1.29 5.48 1.29 8 0 1.26.65 2.62.99 4 .99h2v-2h-2zM3.95 19H4c1.6 0 3.02-.88 4-2 .98 1.12 2.4 2 4 2s3.02-.88 4-2c.98 1.12 2.4 2 4 2h.05l2.18-7.65-2.23-.73V4h-5V1H9v3H4v6.62l-2.23.73L3.95 19zM6 6h12v3.97L12 8 6 9.97V6z"/></svg>
            </svg>
        }
    }
}
| 36.565217 | 479 | 0.584423 |
14bcd0290f6573103e5115e51733bb3b3ddb7e1f | 15,308 | #[doc = "Register `LACTCONFIG` reader"]
// svd2rust-generated newtype wrappers: `R` grants read access and `W` write
// access to the LACTCONFIG register via the generic `crate::R`/`crate::W`
// machinery, with `Deref`/`From` bridging to the generic types.
pub struct R(crate::R<LACTCONFIG_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<LACTCONFIG_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<LACTCONFIG_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<LACTCONFIG_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `LACTCONFIG` writer"]
pub struct W(crate::W<LACTCONFIG_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<LACTCONFIG_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl From<crate::W<LACTCONFIG_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<LACTCONFIG_SPEC>) -> Self {
        W(writer)
    }
}
// Single-bit reader/writer proxies for the LACT_RTC_ONLY field (bit 7).
#[doc = "Field `LACT_RTC_ONLY` reader - "]
pub struct LACT_RTC_ONLY_R(crate::FieldReader<bool, bool>);
impl LACT_RTC_ONLY_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        LACT_RTC_ONLY_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_RTC_ONLY_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_RTC_ONLY` writer - "]
pub struct LACT_RTC_ONLY_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_RTC_ONLY_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 7, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u32 & 0x01) << 7);
        self.w
    }
}
// Single-bit reader/writer proxies for the LACT_CPST_EN field (bit 8).
#[doc = "Field `LACT_CPST_EN` reader - "]
pub struct LACT_CPST_EN_R(crate::FieldReader<bool, bool>);
impl LACT_CPST_EN_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        LACT_CPST_EN_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_CPST_EN_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_CPST_EN` writer - "]
pub struct LACT_CPST_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_CPST_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 8, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 8)) | ((value as u32 & 0x01) << 8);
        self.w
    }
}
// Single-bit reader/writer proxies for the LACT_LAC_EN field (bit 9).
#[doc = "Field `LACT_LAC_EN` reader - "]
pub struct LACT_LAC_EN_R(crate::FieldReader<bool, bool>);
impl LACT_LAC_EN_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        LACT_LAC_EN_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_LAC_EN_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_LAC_EN` writer - "]
pub struct LACT_LAC_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_LAC_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 9, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 9)) | ((value as u32 & 0x01) << 9);
        self.w
    }
}
// Single-bit reader/writer proxies for the LACT_ALARM_EN field (bit 10).
#[doc = "Field `LACT_ALARM_EN` reader - "]
pub struct LACT_ALARM_EN_R(crate::FieldReader<bool, bool>);
impl LACT_ALARM_EN_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        LACT_ALARM_EN_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_ALARM_EN_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_ALARM_EN` writer - "]
pub struct LACT_ALARM_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_ALARM_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 10, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 10)) | ((value as u32 & 0x01) << 10);
        self.w
    }
}
// Single-bit reader/writer proxies for the LACT_LEVEL_INT_EN field (bit 11).
#[doc = "Field `LACT_LEVEL_INT_EN` reader - "]
pub struct LACT_LEVEL_INT_EN_R(crate::FieldReader<bool, bool>);
impl LACT_LEVEL_INT_EN_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        LACT_LEVEL_INT_EN_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_LEVEL_INT_EN_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_LEVEL_INT_EN` writer - "]
pub struct LACT_LEVEL_INT_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_LEVEL_INT_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 11, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 11)) | ((value as u32 & 0x01) << 11);
        self.w
    }
}
// Single-bit reader/writer proxies for the LACT_EDGE_INT_EN field (bit 12).
#[doc = "Field `LACT_EDGE_INT_EN` reader - "]
pub struct LACT_EDGE_INT_EN_R(crate::FieldReader<bool, bool>);
impl LACT_EDGE_INT_EN_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        LACT_EDGE_INT_EN_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_EDGE_INT_EN_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_EDGE_INT_EN` writer - "]
pub struct LACT_EDGE_INT_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_EDGE_INT_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 12, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 12)) | ((value as u32 & 0x01) << 12);
        self.w
    }
}
// Reader/writer proxies for the 16-bit LACT_DIVIDER field (bits 13..=28).
#[doc = "Field `LACT_DIVIDER` reader - "]
pub struct LACT_DIVIDER_R(crate::FieldReader<u16, u16>);
impl LACT_DIVIDER_R {
    #[inline(always)]
    pub(crate) fn new(bits: u16) -> Self {
        LACT_DIVIDER_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_DIVIDER_R {
    type Target = crate::FieldReader<u16, u16>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_DIVIDER` writer - "]
pub struct LACT_DIVIDER_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_DIVIDER_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Raw 16-bit field write: the value is masked to the field width and
        // shifted into position.
        self.w.bits = (self.w.bits & !(0xffff << 13)) | ((value as u32 & 0xffff) << 13);
        self.w
    }
}
// Single-bit reader/writer proxies for the LACT_AUTORELOAD field (bit 29).
#[doc = "Field `LACT_AUTORELOAD` reader - "]
pub struct LACT_AUTORELOAD_R(crate::FieldReader<bool, bool>);
impl LACT_AUTORELOAD_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        LACT_AUTORELOAD_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_AUTORELOAD_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_AUTORELOAD` writer - "]
pub struct LACT_AUTORELOAD_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_AUTORELOAD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 29, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 29)) | ((value as u32 & 0x01) << 29);
        self.w
    }
}
// Single-bit reader/writer proxies for the LACT_INCREASE field (bit 30).
#[doc = "Field `LACT_INCREASE` reader - "]
pub struct LACT_INCREASE_R(crate::FieldReader<bool, bool>);
impl LACT_INCREASE_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        LACT_INCREASE_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_INCREASE_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_INCREASE` writer - "]
pub struct LACT_INCREASE_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_INCREASE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 30, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 30)) | ((value as u32 & 0x01) << 30);
        self.w
    }
}
// Single-bit reader/writer proxies for the LACT_EN field (bit 31).
#[doc = "Field `LACT_EN` reader - "]
pub struct LACT_EN_R(crate::FieldReader<bool, bool>);
impl LACT_EN_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        LACT_EN_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for LACT_EN_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LACT_EN` writer - "]
pub struct LACT_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> LACT_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 31, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 31)) | ((value as u32 & 0x01) << 31);
        self.w
    }
}
// Read accessors: each method extracts its field from the captured register
// value (`self.bits`) at the bit position encoded in the shift/mask.
impl R {
    #[doc = "Bit 7"]
    #[inline(always)]
    pub fn lact_rtc_only(&self) -> LACT_RTC_ONLY_R {
        LACT_RTC_ONLY_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8"]
    #[inline(always)]
    pub fn lact_cpst_en(&self) -> LACT_CPST_EN_R {
        LACT_CPST_EN_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9"]
    #[inline(always)]
    pub fn lact_lac_en(&self) -> LACT_LAC_EN_R {
        LACT_LAC_EN_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10"]
    #[inline(always)]
    pub fn lact_alarm_en(&self) -> LACT_ALARM_EN_R {
        LACT_ALARM_EN_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11"]
    #[inline(always)]
    pub fn lact_level_int_en(&self) -> LACT_LEVEL_INT_EN_R {
        LACT_LEVEL_INT_EN_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 12"]
    #[inline(always)]
    pub fn lact_edge_int_en(&self) -> LACT_EDGE_INT_EN_R {
        LACT_EDGE_INT_EN_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bits 13:28"]
    #[inline(always)]
    pub fn lact_divider(&self) -> LACT_DIVIDER_R {
        LACT_DIVIDER_R::new(((self.bits >> 13) & 0xffff) as u16)
    }
    #[doc = "Bit 29"]
    #[inline(always)]
    pub fn lact_autoreload(&self) -> LACT_AUTORELOAD_R {
        LACT_AUTORELOAD_R::new(((self.bits >> 29) & 0x01) != 0)
    }
    #[doc = "Bit 30"]
    #[inline(always)]
    pub fn lact_increase(&self) -> LACT_INCREASE_R {
        LACT_INCREASE_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31"]
    #[inline(always)]
    pub fn lact_en(&self) -> LACT_EN_R {
        LACT_EN_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
impl W {
#[doc = "Bit 7"]
#[inline(always)]
pub fn lact_rtc_only(&mut self) -> LACT_RTC_ONLY_W {
LACT_RTC_ONLY_W { w: self }
}
#[doc = "Bit 8"]
#[inline(always)]
pub fn lact_cpst_en(&mut self) -> LACT_CPST_EN_W {
LACT_CPST_EN_W { w: self }
}
#[doc = "Bit 9"]
#[inline(always)]
pub fn lact_lac_en(&mut self) -> LACT_LAC_EN_W {
LACT_LAC_EN_W { w: self }
}
#[doc = "Bit 10"]
#[inline(always)]
pub fn lact_alarm_en(&mut self) -> LACT_ALARM_EN_W {
LACT_ALARM_EN_W { w: self }
}
#[doc = "Bit 11"]
#[inline(always)]
pub fn lact_level_int_en(&mut self) -> LACT_LEVEL_INT_EN_W {
LACT_LEVEL_INT_EN_W { w: self }
}
#[doc = "Bit 12"]
#[inline(always)]
pub fn lact_edge_int_en(&mut self) -> LACT_EDGE_INT_EN_W {
LACT_EDGE_INT_EN_W { w: self }
}
#[doc = "Bits 13:28"]
#[inline(always)]
pub fn lact_divider(&mut self) -> LACT_DIVIDER_W {
LACT_DIVIDER_W { w: self }
}
#[doc = "Bit 29"]
#[inline(always)]
pub fn lact_autoreload(&mut self) -> LACT_AUTORELOAD_W {
LACT_AUTORELOAD_W { w: self }
}
#[doc = "Bit 30"]
#[inline(always)]
pub fn lact_increase(&mut self) -> LACT_INCREASE_W {
LACT_INCREASE_W { w: self }
}
#[doc = "Bit 31"]
#[inline(always)]
pub fn lact_en(&mut self) -> LACT_EN_W {
LACT_EN_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [lactconfig](index.html) module"]
pub struct LACTCONFIG_SPEC;
impl crate::RegisterSpec for LACTCONFIG_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [lactconfig::R](R) reader structure"]
impl crate::Readable for LACTCONFIG_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [lactconfig::W](W) writer structure"]
impl crate::Writable for LACTCONFIG_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets LACTCONFIG to value 0x6000_2300"]
impl crate::Resettable for LACTCONFIG_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0x6000_2300
}
}
| 29.047438 | 391 | 0.575843 |
d958b3c18cdcd96d2d64e82df5508c9072e5abbe | 24,042 | //! The implementation of the query system itself. This defines the macros that
//! generate the actual methods on tcx which find and execute the provider,
//! manage the caches, and so forth.
use super::queries;
use rustc_middle::dep_graph::{DepKind, DepNode, DepNodeExt, DepNodeIndex, SerializedDepNodeIndex};
use rustc_middle::ty::query::on_disk_cache;
use rustc_middle::ty::tls::{self, ImplicitCtxt};
use rustc_middle::ty::{self, TyCtxt};
use rustc_query_system::dep_graph::HasDepContext;
use rustc_query_system::query::{QueryContext, QueryDescription, QueryJobId, QueryMap};
use rustc_data_structures::sync::Lock;
use rustc_data_structures::thin_vec::ThinVec;
use rustc_errors::Diagnostic;
use rustc_serialize::opaque;
use rustc_span::def_id::{DefId, LocalDefId};
#[derive(Copy, Clone)]
pub struct QueryCtxt<'tcx> {
pub tcx: TyCtxt<'tcx>,
pub queries: &'tcx super::Queries<'tcx>,
}
impl<'tcx> std::ops::Deref for QueryCtxt<'tcx> {
type Target = TyCtxt<'tcx>;
fn deref(&self) -> &Self::Target {
&self.tcx
}
}
impl HasDepContext for QueryCtxt<'tcx> {
type DepKind = rustc_middle::dep_graph::DepKind;
type StableHashingContext = rustc_middle::ich::StableHashingContext<'tcx>;
type DepContext = TyCtxt<'tcx>;
#[inline]
fn dep_context(&self) -> &Self::DepContext {
&self.tcx
}
}
impl QueryContext for QueryCtxt<'tcx> {
fn def_path_str(&self, def_id: DefId) -> String {
self.tcx.def_path_str(def_id)
}
fn current_query_job(&self) -> Option<QueryJobId<Self::DepKind>> {
tls::with_related_context(**self, |icx| icx.query)
}
fn try_collect_active_jobs(&self) -> Option<QueryMap<Self::DepKind>> {
self.queries.try_collect_active_jobs(**self)
}
fn try_load_from_on_disk_cache(&self, dep_node: &DepNode) {
let cb = &super::QUERY_CALLBACKS[dep_node.kind as usize];
(cb.try_load_from_on_disk_cache)(*self, dep_node)
}
fn try_force_from_dep_node(&self, dep_node: &DepNode) -> bool {
// FIXME: This match is just a workaround for incremental bugs and should
// be removed. https://github.com/rust-lang/rust/issues/62649 is one such
// bug that must be fixed before removing this.
match dep_node.kind {
DepKind::hir_owner | DepKind::hir_owner_nodes => {
if let Some(def_id) = dep_node.extract_def_id(**self) {
let def_id = def_id.expect_local();
let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id);
if def_id != hir_id.owner {
// This `DefPath` does not have a
// corresponding `DepNode` (e.g. a
// struct field), and the ` DefPath`
// collided with the `DefPath` of a
// proper item that existed in the
// previous compilation session.
//
// Since the given `DefPath` does not
// denote the item that previously
// existed, we just fail to mark green.
return false;
}
} else {
// If the node does not exist anymore, we
// just fail to mark green.
return false;
}
}
_ => {
// For other kinds of nodes it's OK to be
// forced.
}
}
debug!("try_force_from_dep_node({:?}) --- trying to force", dep_node);
// We must avoid ever having to call `force_from_dep_node()` for a
// `DepNode::codegen_unit`:
// Since we cannot reconstruct the query key of a `DepNode::codegen_unit`, we
// would always end up having to evaluate the first caller of the
// `codegen_unit` query that *is* reconstructible. This might very well be
// the `compile_codegen_unit` query, thus re-codegenning the whole CGU just
// to re-trigger calling the `codegen_unit` query with the right key. At
// that point we would already have re-done all the work we are trying to
// avoid doing in the first place.
// The solution is simple: Just explicitly call the `codegen_unit` query for
// each CGU, right after partitioning. This way `try_mark_green` will always
// hit the cache instead of having to go through `force_from_dep_node`.
// This assertion makes sure, we actually keep applying the solution above.
debug_assert!(
dep_node.kind != DepKind::codegen_unit,
"calling force_from_dep_node() on DepKind::codegen_unit"
);
let cb = &super::QUERY_CALLBACKS[dep_node.kind as usize];
(cb.force_from_dep_node)(*self, dep_node)
}
// Interactions with on_disk_cache
fn load_diagnostics(&self, prev_dep_node_index: SerializedDepNodeIndex) -> Vec<Diagnostic> {
self.on_disk_cache
.as_ref()
.map(|c| c.load_diagnostics(**self, prev_dep_node_index))
.unwrap_or_default()
}
fn store_diagnostics(&self, dep_node_index: DepNodeIndex, diagnostics: ThinVec<Diagnostic>) {
if let Some(c) = self.on_disk_cache.as_ref() {
c.store_diagnostics(dep_node_index, diagnostics)
}
}
fn store_diagnostics_for_anon_node(
&self,
dep_node_index: DepNodeIndex,
diagnostics: ThinVec<Diagnostic>,
) {
if let Some(c) = self.on_disk_cache.as_ref() {
c.store_diagnostics_for_anon_node(dep_node_index, diagnostics)
}
}
/// Executes a job by changing the `ImplicitCtxt` to point to the
/// new query job while it executes. It returns the diagnostics
/// captured during execution and the actual result.
#[inline(always)]
fn start_query<R>(
&self,
token: QueryJobId<Self::DepKind>,
diagnostics: Option<&Lock<ThinVec<Diagnostic>>>,
compute: impl FnOnce() -> R,
) -> R {
// The `TyCtxt` stored in TLS has the same global interner lifetime
// as `self`, so we use `with_related_context` to relate the 'tcx lifetimes
// when accessing the `ImplicitCtxt`.
tls::with_related_context(**self, move |current_icx| {
// Update the `ImplicitCtxt` to point to our new query job.
let new_icx = ImplicitCtxt {
tcx: **self,
query: Some(token),
diagnostics,
layout_depth: current_icx.layout_depth,
task_deps: current_icx.task_deps,
};
// Use the `ImplicitCtxt` while we execute the query.
tls::enter_context(&new_icx, |_| {
rustc_data_structures::stack::ensure_sufficient_stack(compute)
})
})
}
}
impl<'tcx> QueryCtxt<'tcx> {
pub(super) fn encode_query_results(
self,
encoder: &mut on_disk_cache::CacheEncoder<'a, 'tcx, opaque::FileEncoder>,
query_result_index: &mut on_disk_cache::EncodedQueryResultIndex,
) -> opaque::FileEncodeResult {
macro_rules! encode_queries {
($($query:ident,)*) => {
$(
on_disk_cache::encode_query_results::<_, super::queries::$query<'_>>(
self,
encoder,
query_result_index
)?;
)*
}
}
rustc_cached_queries!(encode_queries!);
Ok(())
}
}
/// This struct stores metadata about each Query.
///
/// Information is retrieved by indexing the `QUERIES` array using the integer value
/// of the `DepKind`. Overall, this allows to implement `QueryContext` using this manual
/// jump table instead of large matches.
pub struct QueryStruct {
/// The red/green evaluation system will try to mark a specific DepNode in the
/// dependency graph as green by recursively trying to mark the dependencies of
/// that `DepNode` as green. While doing so, it will sometimes encounter a `DepNode`
/// where we don't know if it is red or green and we therefore actually have
/// to recompute its value in order to find out. Since the only piece of
/// information that we have at that point is the `DepNode` we are trying to
/// re-evaluate, we need some way to re-run a query from just that. This is what
/// `force_from_dep_node()` implements.
///
/// In the general case, a `DepNode` consists of a `DepKind` and an opaque
/// GUID/fingerprint that will uniquely identify the node. This GUID/fingerprint
/// is usually constructed by computing a stable hash of the query-key that the
/// `DepNode` corresponds to. Consequently, it is not in general possible to go
/// back from hash to query-key (since hash functions are not reversible). For
/// this reason `force_from_dep_node()` is expected to fail from time to time
/// because we just cannot find out, from the `DepNode` alone, what the
/// corresponding query-key is and therefore cannot re-run the query.
///
/// The system deals with this case letting `try_mark_green` fail which forces
/// the root query to be re-evaluated.
///
/// Now, if `force_from_dep_node()` would always fail, it would be pretty useless.
/// Fortunately, we can use some contextual information that will allow us to
/// reconstruct query-keys for certain kinds of `DepNode`s. In particular, we
/// enforce by construction that the GUID/fingerprint of certain `DepNode`s is a
/// valid `DefPathHash`. Since we also always build a huge table that maps every
/// `DefPathHash` in the current codebase to the corresponding `DefId`, we have
/// everything we need to re-run the query.
///
/// Take the `mir_promoted` query as an example. Like many other queries, it
/// just has a single parameter: the `DefId` of the item it will compute the
/// validated MIR for. Now, when we call `force_from_dep_node()` on a `DepNode`
/// with kind `MirValidated`, we know that the GUID/fingerprint of the `DepNode`
/// is actually a `DefPathHash`, and can therefore just look up the corresponding
/// `DefId` in `tcx.def_path_hash_to_def_id`.
///
/// When you implement a new query, it will likely have a corresponding new
/// `DepKind`, and you'll have to support it here in `force_from_dep_node()`. As
/// a rule of thumb, if your query takes a `DefId` or `LocalDefId` as sole parameter,
/// then `force_from_dep_node()` should not fail for it. Otherwise, you can just
/// add it to the "We don't have enough information to reconstruct..." group in
/// the match below.
pub(crate) force_from_dep_node: fn(tcx: QueryCtxt<'_>, dep_node: &DepNode) -> bool,
/// Invoke a query to put the on-disk cached value in memory.
pub(crate) try_load_from_on_disk_cache: fn(QueryCtxt<'_>, &DepNode),
}
macro_rules! handle_cycle_error {
([][$tcx: expr, $error:expr]) => {{
$error.emit();
Value::from_cycle_error($tcx)
}};
([fatal_cycle $($rest:tt)*][$tcx:expr, $error:expr]) => {{
$error.emit();
$tcx.sess.abort_if_errors();
unreachable!()
}};
([cycle_delay_bug $($rest:tt)*][$tcx:expr, $error:expr]) => {{
$error.delay_as_bug();
Value::from_cycle_error($tcx)
}};
([$other:ident $(($($other_args:tt)*))* $(, $($modifiers:tt)*)*][$($args:tt)*]) => {
handle_cycle_error!([$($($modifiers)*)*][$($args)*])
};
}
macro_rules! is_anon {
([]) => {{
false
}};
([anon $($rest:tt)*]) => {{
true
}};
([$other:ident $(($($other_args:tt)*))* $(, $($modifiers:tt)*)*]) => {
is_anon!([$($($modifiers)*)*])
};
}
macro_rules! is_eval_always {
([]) => {{
false
}};
([eval_always $($rest:tt)*]) => {{
true
}};
([$other:ident $(($($other_args:tt)*))* $(, $($modifiers:tt)*)*]) => {
is_eval_always!([$($($modifiers)*)*])
};
}
macro_rules! hash_result {
([][$hcx:expr, $result:expr]) => {{
dep_graph::hash_result($hcx, &$result)
}};
([no_hash $($rest:tt)*][$hcx:expr, $result:expr]) => {{
None
}};
([$other:ident $(($($other_args:tt)*))* $(, $($modifiers:tt)*)*][$($args:tt)*]) => {
hash_result!([$($($modifiers)*)*][$($args)*])
};
}
macro_rules! define_queries {
(<$tcx:tt>
$($(#[$attr:meta])*
[$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty,)*) => {
define_queries_struct! {
tcx: $tcx,
input: ($(([$($modifiers)*] [$($attr)*] [$name]))*)
}
mod make_query {
use super::*;
// Create an eponymous constructor for each query.
$(#[allow(nonstandard_style)] $(#[$attr])*
pub fn $name<$tcx>(tcx: QueryCtxt<$tcx>, key: query_keys::$name<$tcx>) -> QueryStackFrame {
let kind = dep_graph::DepKind::$name;
let name = stringify!($name);
let description = ty::print::with_forced_impl_filename_line(
// Force filename-line mode to avoid invoking `type_of` query.
|| queries::$name::describe(tcx, key)
);
let description = if tcx.sess.verbose() {
format!("{} [{}]", description, name)
} else {
description
};
let span = if kind == dep_graph::DepKind::def_span {
// The `def_span` query is used to calculate `default_span`,
// so exit to avoid infinite recursion.
None
} else {
Some(key.default_span(*tcx))
};
let hash = || {
let mut hcx = tcx.create_stable_hashing_context();
let mut hasher = StableHasher::new();
std::mem::discriminant(&kind).hash_stable(&mut hcx, &mut hasher);
key.hash_stable(&mut hcx, &mut hasher);
hasher.finish::<u64>()
};
QueryStackFrame::new(name, description, span, hash)
})*
}
#[allow(nonstandard_style)]
pub mod queries {
use std::marker::PhantomData;
$(pub struct $name<$tcx> {
data: PhantomData<&$tcx ()>
})*
}
$(impl<$tcx> QueryConfig for queries::$name<$tcx> {
type Key = query_keys::$name<$tcx>;
type Value = query_values::$name<$tcx>;
type Stored = query_stored::$name<$tcx>;
const NAME: &'static str = stringify!($name);
}
impl<$tcx> QueryAccessors<QueryCtxt<$tcx>> for queries::$name<$tcx> {
const ANON: bool = is_anon!([$($modifiers)*]);
const EVAL_ALWAYS: bool = is_eval_always!([$($modifiers)*]);
const DEP_KIND: dep_graph::DepKind = dep_graph::DepKind::$name;
type Cache = query_storage::$name<$tcx>;
#[inline(always)]
fn query_state<'a>(tcx: QueryCtxt<$tcx>) -> &'a QueryState<crate::dep_graph::DepKind, Self::Key>
where QueryCtxt<$tcx>: 'a
{
&tcx.queries.$name
}
#[inline(always)]
fn query_cache<'a>(tcx: QueryCtxt<$tcx>) -> &'a QueryCacheStore<Self::Cache>
where 'tcx:'a
{
&tcx.query_caches.$name
}
#[inline]
fn compute(tcx: QueryCtxt<'tcx>, key: Self::Key) -> Self::Value {
let is_local = key.query_crate() == LOCAL_CRATE;
let provider = if is_local {
tcx.queries.local_providers.$name
} else {
tcx.queries.extern_providers.$name
};
provider(*tcx, key)
}
fn hash_result(
_hcx: &mut StableHashingContext<'_>,
_result: &Self::Value
) -> Option<Fingerprint> {
hash_result!([$($modifiers)*][_hcx, _result])
}
fn handle_cycle_error(
tcx: QueryCtxt<'tcx>,
mut error: DiagnosticBuilder<'_>,
) -> Self::Value {
handle_cycle_error!([$($modifiers)*][tcx, error])
}
})*
#[allow(non_upper_case_globals)]
pub mod query_callbacks {
use super::*;
use rustc_middle::dep_graph::DepNode;
use rustc_middle::ty::query::query_keys;
use rustc_query_system::dep_graph::DepNodeParams;
use rustc_query_system::query::{force_query, QueryDescription};
// We use this for most things when incr. comp. is turned off.
pub const Null: QueryStruct = QueryStruct {
force_from_dep_node: |_, dep_node| bug!("force_from_dep_node: encountered {:?}", dep_node),
try_load_from_on_disk_cache: |_, _| {},
};
pub const TraitSelect: QueryStruct = QueryStruct {
force_from_dep_node: |_, _| false,
try_load_from_on_disk_cache: |_, _| {},
};
pub const CompileCodegenUnit: QueryStruct = QueryStruct {
force_from_dep_node: |_, _| false,
try_load_from_on_disk_cache: |_, _| {},
};
$(pub const $name: QueryStruct = {
const is_anon: bool = is_anon!([$($modifiers)*]);
#[inline(always)]
fn can_reconstruct_query_key() -> bool {
<query_keys::$name<'_> as DepNodeParams<TyCtxt<'_>>>
::can_reconstruct_query_key()
}
fn recover<'tcx>(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<query_keys::$name<'tcx>> {
<query_keys::$name<'_> as DepNodeParams<TyCtxt<'_>>>::recover(tcx, dep_node)
}
fn force_from_dep_node(tcx: QueryCtxt<'_>, dep_node: &DepNode) -> bool {
if is_anon {
return false;
}
if !can_reconstruct_query_key() {
return false;
}
if let Some(key) = recover(*tcx, dep_node) {
force_query::<queries::$name<'_>, _>(tcx, key, DUMMY_SP, *dep_node);
return true;
}
false
}
fn try_load_from_on_disk_cache(tcx: QueryCtxt<'_>, dep_node: &DepNode) {
if is_anon {
return
}
if !can_reconstruct_query_key() {
return
}
debug_assert!(tcx.dep_graph
.node_color(dep_node)
.map(|c| c.is_green())
.unwrap_or(false));
let key = recover(*tcx, dep_node).unwrap_or_else(|| panic!("Failed to recover key for {:?} with hash {}", dep_node, dep_node.hash));
if queries::$name::cache_on_disk(tcx, &key, None) {
let _ = tcx.$name(key);
}
}
QueryStruct {
force_from_dep_node,
try_load_from_on_disk_cache,
}
};)*
}
static QUERY_CALLBACKS: &[QueryStruct] = &make_dep_kind_array!(query_callbacks);
}
}
// FIXME(eddyb) this macro (and others?) use `$tcx` and `'tcx` interchangeably.
// We should either not take `$tcx` at all and use `'tcx` everywhere, or use
// `$tcx` everywhere (even if that isn't necessary due to lack of hygiene).
macro_rules! define_queries_struct {
(tcx: $tcx:tt,
input: ($(([$($modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => {
pub struct Queries<$tcx> {
local_providers: Box<Providers>,
extern_providers: Box<Providers>,
$($(#[$attr])* $name: QueryState<
crate::dep_graph::DepKind,
query_keys::$name<$tcx>,
>,)*
}
impl<$tcx> Queries<$tcx> {
pub fn new(
local_providers: Providers,
extern_providers: Providers,
) -> Self {
Queries {
local_providers: Box::new(local_providers),
extern_providers: Box::new(extern_providers),
$($name: Default::default()),*
}
}
pub(crate) fn try_collect_active_jobs(
&$tcx self,
tcx: TyCtxt<$tcx>,
) -> Option<QueryMap<crate::dep_graph::DepKind>> {
let tcx = QueryCtxt { tcx, queries: self };
let mut jobs = QueryMap::default();
$(
self.$name.try_collect_active_jobs(
tcx,
dep_graph::DepKind::$name,
make_query::$name,
&mut jobs,
)?;
)*
Some(jobs)
}
}
impl QueryEngine<'tcx> for Queries<'tcx> {
unsafe fn deadlock(&'tcx self, _tcx: TyCtxt<'tcx>, _registry: &rustc_rayon_core::Registry) {
#[cfg(parallel_compiler)]
{
let tcx = QueryCtxt { tcx: _tcx, queries: self };
rustc_query_system::query::deadlock(tcx, _registry)
}
}
fn encode_query_results(
&'tcx self,
tcx: TyCtxt<'tcx>,
encoder: &mut on_disk_cache::CacheEncoder<'a, 'tcx, opaque::FileEncoder>,
query_result_index: &mut on_disk_cache::EncodedQueryResultIndex,
) -> opaque::FileEncodeResult {
let tcx = QueryCtxt { tcx, queries: self };
tcx.encode_query_results(encoder, query_result_index)
}
fn exec_cache_promotions(&'tcx self, tcx: TyCtxt<'tcx>) {
let tcx = QueryCtxt { tcx, queries: self };
tcx.dep_graph.exec_cache_promotions(tcx)
}
fn try_mark_green(&'tcx self, tcx: TyCtxt<'tcx>, dep_node: &dep_graph::DepNode) -> bool {
let qcx = QueryCtxt { tcx, queries: self };
tcx.dep_graph.try_mark_green(qcx, dep_node).is_some()
}
fn try_print_query_stack(
&'tcx self,
tcx: TyCtxt<'tcx>,
query: Option<QueryJobId<dep_graph::DepKind>>,
handler: &Handler,
num_frames: Option<usize>,
) -> usize {
let qcx = QueryCtxt { tcx, queries: self };
rustc_query_system::query::print_query_stack(qcx, query, handler, num_frames)
}
$($(#[$attr])*
#[inline(always)]
fn $name(
&'tcx self,
tcx: TyCtxt<$tcx>,
span: Span,
key: query_keys::$name<$tcx>,
lookup: QueryLookup,
mode: QueryMode,
) -> Option<query_stored::$name<$tcx>> {
let qcx = QueryCtxt { tcx, queries: self };
get_query::<queries::$name<$tcx>, _>(qcx, span, key, lookup, mode)
})*
}
};
}
fn describe_as_module(def_id: LocalDefId, tcx: TyCtxt<'_>) -> String {
if def_id.is_top_level_module() {
"top-level module".to_string()
} else {
format!("module `{}`", tcx.def_path_str(def_id.to_def_id()))
}
}
rustc_query_description! {}
| 39.029221 | 152 | 0.543133 |
081cd9db09fa5628ba713c8db9f70a0c00fb937a | 255 | #[derive(Debug, PartialEq)]
pub enum LogLevel{
Quiet, Normal, Verbose
}
#[derive(Debug, PartialEq)]
pub struct Config{
pub log_level: LogLevel,
}
impl Config{
pub fn new(level: LogLevel) -> Config{
Config{log_level: level}
}
} | 15 | 42 | 0.647059 |
5b64e9817c0a4876676ac73323e7bf582cfa0412 | 8,995 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs::File;
use std::io::Write;
use common_base::tokio;
use common_datablocks::assert_blocks_eq;
use common_datablocks::DataBlock;
use common_datavalues::prelude::*;
use common_exception::ErrorCode;
use common_exception::Result;
use common_streams::CsvSource;
use common_streams::ParquetSource;
use common_streams::Source;
use common_streams::ValueSource;
use futures::io::BufReader;
use opendal::readers::SeekableReader;
use opendal::services::fs;
use opendal::Operator;
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn test_parse_values() {
let buffer =
"(1, 'str', 1) , (-1, ' str ' , 1.1) , ( 2, 'aa aa', 2.2), (3, \"33'33\", 3.3) ";
let schema = DataSchemaRefExt::create(vec![
DataField::new("a", i8::to_data_type()),
DataField::new("b", Vu8::to_data_type()),
DataField::new("c", f64::to_data_type()),
]);
let mut values_source = ValueSource::new(buffer.as_bytes(), schema, 10);
let block = values_source.read().await.unwrap().unwrap();
assert_blocks_eq(
vec![
"+----+-------+-----+",
"| a | b | c |",
"+----+-------+-----+",
"| 1 | str | 1 |",
"| -1 | str | 1.1 |",
"| 2 | aa aa | 2.2 |",
"| 3 | 33'33 | 3.3 |",
"+----+-------+-----+",
],
&[block],
);
let block = values_source.read().await.unwrap();
assert!(block.is_none());
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn test_parse_csvs() {
for field_delimitor in [b',', b'\t', b'#'] {
for record_delimitor in [b'\n', b'\r', b'~'] {
let dir = tempfile::tempdir().unwrap();
let name = "my-temporary-note.txt";
let file_path = dir.path().join(name);
let mut file = File::create(file_path).unwrap();
write!(
file,
"1{}\"1\"{}1.11{}2{}\"2\"{}2{}3{}\"3-'3'-3\"{}3\"{}",
field_delimitor as char,
field_delimitor as char,
record_delimitor as char,
field_delimitor as char,
field_delimitor as char,
record_delimitor as char,
field_delimitor as char,
field_delimitor as char,
record_delimitor as char,
)
.unwrap();
let schema = DataSchemaRefExt::create(vec![
DataField::new("a", i8::to_data_type()),
DataField::new("b", Vu8::to_data_type()),
DataField::new("c", f64::to_data_type()),
]);
let local = Operator::new(
fs::Backend::build()
.root(dir.path().to_str().unwrap())
.finish()
.await
.unwrap(),
);
let stream = local.read(name).run().await.unwrap();
let mut csv_source =
CsvSource::try_create(stream, schema, false, field_delimitor, record_delimitor, 10)
.unwrap();
let block = csv_source.read().await.unwrap().unwrap();
assert_blocks_eq(
vec![
"+---+---------+------+",
"| a | b | c |",
"+---+---------+------+",
"| 1 | 1 | 1.11 |",
"| 2 | 2 | 2 |",
"| 3 | 3-'3'-3 | 3 |",
"+---+---------+------+",
],
&[block],
);
let block = csv_source.read().await.unwrap();
assert!(block.is_none());
drop(file);
dir.close().unwrap();
}
}
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn test_parse_csv2() {
let dir = tempfile::tempdir().unwrap();
let name = "my-temporary-note.txt";
let file_path = dir.path().join(name);
let mut file = File::create(file_path).unwrap();
write!(
file,
r#"1,'Beijing',100
2,'Shanghai',80
3,'Guangzhou',60
4,'Shenzhen',70
5,'Shenzhen',55
6,'Beijing',99"#
)
.unwrap();
let schema = DataSchemaRefExt::create(vec![
DataField::new("a", i8::to_data_type()),
DataField::new("b", Vu8::to_data_type()),
DataField::new("c", f64::to_data_type()),
]);
let local = Operator::new(
fs::Backend::build()
.root(dir.path().to_str().unwrap())
.finish()
.await
.unwrap(),
);
let stream = local.read(name).run().await.unwrap();
let mut csv_source = CsvSource::try_create(stream, schema, false, b',', b'\n', 10).unwrap();
let block = csv_source.read().await.unwrap().unwrap();
assert_blocks_eq(
vec![
"+---+-------------+-----+",
"| a | b | c |",
"+---+-------------+-----+",
"| 1 | 'Beijing' | 100 |",
"| 2 | 'Shanghai' | 80 |",
"| 3 | 'Guangzhou' | 60 |",
"| 4 | 'Shenzhen' | 70 |",
"| 5 | 'Shenzhen' | 55 |",
"| 6 | 'Beijing' | 99 |",
"+---+-------------+-----+",
],
&[block],
);
let block = csv_source.read().await.unwrap();
assert!(block.is_none());
drop(file);
dir.close().unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn test_source_parquet() -> Result<()> {
use common_datavalues::prelude::*;
let schema = DataSchemaRefExt::create(vec![
DataField::new("a", i8::to_data_type()),
DataField::new("b", Vu8::to_data_type()),
]);
let arrow_schema = schema.to_arrow();
use common_arrow::arrow::io::parquet::write::*;
let options = WriteOptions {
write_statistics: true,
compression: Compression::Lz4, // let's begin with lz4
version: Version::V2,
};
let col_a = Series::from_data(vec![1i8, 1, 2, 1, 2, 3]);
let col_b = Series::from_data(vec!["1", "1", "2", "1", "2", "3"]);
let sample_block = DataBlock::create(schema.clone(), vec![col_a, col_b]);
use common_arrow::arrow::record_batch::RecordBatch;
let batch = RecordBatch::try_from(sample_block)?;
use common_arrow::parquet::encoding::Encoding;
let encodings = std::iter::repeat(Encoding::Plain)
.take(arrow_schema.fields.len())
.collect::<Vec<_>>();
let page_nums_expects = 3;
let name = "test-parquet";
let dir = tempfile::tempdir().unwrap();
// write test parquet
let len = {
let rg_iter = std::iter::repeat(batch).map(Ok).take(page_nums_expects);
let row_groups = RowGroupIterator::try_new(rg_iter, &arrow_schema, options, encodings)?;
let parquet_schema = row_groups.parquet_schema().clone();
let path = dir.path().join(name);
let mut writer = File::create(path).unwrap();
common_arrow::parquet::write::write_file(
&mut writer,
row_groups,
parquet_schema,
options,
None,
None,
)
.map_err(|e| ErrorCode::ParquetError(e.to_string()))?
};
let local = Operator::new(
fs::Backend::build()
.root(dir.path().to_str().unwrap())
.finish()
.await
.unwrap(),
);
let stream = SeekableReader::new(local, name, len);
let stream = BufReader::with_capacity(4 * 1024 * 1024, stream);
let default_proj = (0..schema.fields().len())
.into_iter()
.collect::<Vec<usize>>();
let mut page_nums = 0;
let mut parquet_source = ParquetSource::new(stream, schema, default_proj);
// expects `page_nums_expects` blocks, and
while let Some(block) = parquet_source.read().await? {
page_nums += 1;
// for each block, the content is the same of `sample_block`
assert_blocks_eq(
vec![
"+---+---+",
"| a | b |",
"+---+---+",
"| 1 | 1 |",
"| 1 | 1 |",
"| 2 | 2 |",
"| 1 | 1 |",
"| 2 | 2 |",
"| 3 | 3 |",
"+---+---+",
],
&[block],
);
}
assert_eq!(page_nums_expects, page_nums);
Ok(())
}
| 32.709091 | 99 | 0.501278 |
3a91fca3b982c56d7bacc0452163e53184b1173e | 1,531 | extern crate cc;
use std::fs::File;
use std::io::{Result, Write};
use std::path::Path;
fn main() {
if let Ok(file_path) = gen_payload_asm() {
cc::Build::new().file(&file_path).compile("payload");
}
}
/// include payload and dtb in sections of asm
fn gen_payload_asm() -> Result<std::path::PathBuf> {
let out_dir = std::env::var("OUT_DIR").unwrap();
let payload = std::env::var("PAYLOAD").unwrap();
let dtb = std::env::var("DTB").unwrap();
if !Path::new(&payload).is_file() {
panic!("Kernel payload `{}` not found", payload)
}
let mut has_dtb = true;
if !Path::new(&dtb).is_file() {
has_dtb = false;
}
let file_path = Path::new(&out_dir).join("payload.S");
let mut f = File::create(&file_path).unwrap();
println!("{:x?} {:x?}", payload, file_path);
write!(f, "# generated by build.rs - do not edit")?;
write!(f, r#"
.section .payload,"a"
.align 12
.global _kernel_payload_start, _kernel_payload_end
_kernel_payload_start:
.incbin "{}"
_kernel_payload_end:
"#, payload)?;
println!("cargo:rerun-if-changed={}", payload);
println!("cargo:rerun-if-env-changed=PAYLOAD");
if has_dtb {
write!(f, r#"
.section .dtb,"a"
.align 12
.global _dtb_start, _dtb_end
_dtb_start:
.incbin "{}"
_dtb_end:
"#, dtb)?;
println!("{:x?} {:x?}", dtb, file_path);
println!("cargo:rerun-if-changed={}", dtb);
println!("cargo:rerun-if-env-changed=DTB");
}
Ok(file_path)
}
| 24.301587 | 61 | 0.587198 |
23bcbae88167bc050adf8d71ac4f269b59a44022 | 2,788 | //! Everything about wire type 2 (Length-delimited),
//! i.e. string, bytes, embedded messages, packed repeated fields.
pub fn escape_string(input: &str) -> String {
let escaped = snailquote::escape(input);
if !escaped.starts_with('"') && !escaped.starts_with('\'') {
format!("\"{}\"", escaped)
} else {
escaped.to_string()
}
}
#[derive(Debug, PartialEq)]
pub enum ShowAs<'a> {
String(&'a str),
Bytes(&'a [u8]),
}
pub fn show_as<'a>(bytes: &'a [u8]) -> ShowAs<'a> {
match std::str::from_utf8(bytes) {
Ok(converted) => {
if converted.chars().all(|char| match char {
'\t' | '\r' | '\n' => true, // Part of next range but should be allowed
'\0'..='\x19' => false, // Non-printable ASCII characters
_ => true,
}) {
ShowAs::String(converted)
} else {
ShowAs::Bytes(bytes)
}
}
Err(_err) => ShowAs::Bytes(bytes),
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn escape_string_works() {
assert_eq!(escape_string(""), r#""""#);
assert_eq!(escape_string("a"), r#""a""#);
assert_eq!(escape_string("foo"), r#""foo""#);
// Spaces
assert_eq!(escape_string("foo bar"), r#"'foo bar'"#);
// Uses single quotes if that avoids escaping
assert_eq!(escape_string("fo\"o"), r#"'fo"o'"#);
assert_eq!(escape_string("{\"my\":\"json\"}"), r#"'{"my":"json"}'"#);
// Uses double quotes if both single and double are in content
assert_eq!(escape_string("f'o\"o"), r#""f'o\"o""#);
// This case would use single quotes in prettier which counts single and double
assert_eq!(
escape_string("{\"my\":\"json's\"}"),
r#""{\"my\":\"json's\"}""#
);
}
#[test]
fn show_as_works() {
assert_eq!(show_as(b""), ShowAs::String(""));
assert_eq!(show_as(b"123"), ShowAs::String("123"));
assert_eq!(show_as(b"with space"), ShowAs::String("with space"));
assert_eq!(show_as(b"Newline: \n"), ShowAs::String("Newline: \n"));
assert_eq!(show_as(b"Tab: \t"), ShowAs::String("Tab: \t"));
assert_eq!(show_as(b"CR: \r"), ShowAs::String("CR: \r"));
// Invalid UTF8
let non_utf8 = vec![0, 159, 146, 150];
assert_eq!(show_as(&non_utf8), ShowAs::Bytes(&non_utf8));
// Non-printable ASCII characters are valid UTF8 but should not be printed as string
assert_eq!(show_as(b"__\0__"), ShowAs::Bytes(b"__\0__")); // Null
assert_eq!(show_as(b"__\x07__"), ShowAs::Bytes(b"__\x07__")); // Bell
assert_eq!(show_as(b"__\x0b__"), ShowAs::Bytes(b"__\x0b__")); // Vertical Tab
}
}
| 34.419753 | 92 | 0.536944 |
ef75d350a133397f15d7d981effecea4de820901 | 6,537 | use serde::ser::Error;
use serde::{Deserialize, Serialize, Serializer};
use super::player::InventorySlot;
/// A block entity loaded or saved to the Anvil format.
/// Should be serialized using NBT.
///
/// https://minecraft.gamepedia.com/Chunk_format#Block_entity_format
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BlockEntityData {
    /// Fields shared by every block entity (its world position).
    #[serde(flatten)]
    pub base: BlockEntityBase,
    /// Kind-specific payload; the NBT `id` tag selects the variant
    /// (see `#[serde(tag = "id")]` on `BlockEntityKind`).
    #[serde(flatten)]
    pub kind: BlockEntityKind,
}
/// Data common to all block entities.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BlockEntityBase {
    /// X coordinate in global coordinate space.
    pub x: i32,
    /// Y coordinate in global space.
    pub y: i32,
    /// Z coordinate in global space.
    pub z: i32,
}
/// Kind of a block entity.
///
/// Internally tagged on the NBT `id` field: each variant's
/// `#[serde(rename = "...")]` is the namespaced id it is stored under,
/// and field names are mapped to PascalCase to match the chunk format.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "id")]
pub enum BlockEntityKind {
    #[serde(rename = "minecraft:beacon")]
    #[serde(rename_all = "PascalCase")]
    Beacon {
        levels: i32,
        primary: i32,
        secondary: i32,
    },
    #[serde(rename = "minecraft:bed")]
    #[serde(rename_all = "PascalCase")]
    Bed, // empty in JE
    #[serde(rename = "minecraft:brewing_stand")]
    #[serde(rename_all = "PascalCase")]
    BrewingStand {
        #[serde(default)]
        items: Vec<InventorySlot>,
        brew_time: i16,
        fuel: i8,
    },
    #[serde(rename = "minecraft:cauldron")]
    #[serde(rename_all = "PascalCase")]
    Cauldron {
        #[serde(default)]
        items: Vec<InventorySlot>,
        potion_id: i16,
        splash_potion: bool,
        is_movable: bool,
    },
    #[serde(rename = "minecraft:chest")]
    #[serde(rename_all = "PascalCase")]
    Chest {
        #[serde(default)]
        items: Vec<InventorySlot>,
        loot_table: Option<String>,
        loot_table_seed: Option<i64>,
    },
    #[serde(rename = "minecraft:comparator")]
    #[serde(rename_all = "PascalCase")]
    Comparator { output_signal: i32 },
    #[serde(rename = "minecraft:command_block")]
    #[serde(rename_all = "PascalCase")]
    CommandBlock {
        custom_name: Option<String>,
        command: String,
        success_count: i32,
        last_output: String,
        track_output: bool,
        powered: bool,
        auto: bool,
        condition_met: bool,
        update_last_execution: bool,
        last_execution: i64,
    },
    #[serde(rename = "minecraft:daylight_detector")]
    #[serde(rename_all = "PascalCase")]
    DaylightDetector, // empty
    #[serde(rename = "minecraft:dispenser")]
    #[serde(rename_all = "PascalCase")]
    Dispenser {
        #[serde(default)]
        items: Vec<InventorySlot>,
    },
    #[serde(rename = "minecraft:dropper")]
    #[serde(rename_all = "PascalCase")]
    Dropper {
        #[serde(default)]
        items: Vec<InventorySlot>,
    },
    #[serde(rename = "minecraft:enchanting_table")]
    #[serde(rename_all = "PascalCase")]
    EnchantingTable,
    #[serde(rename = "minecraft:ender_chest")]
    #[serde(rename_all = "PascalCase")]
    EnderChest,
    #[serde(rename = "minecraft:end_gateway")]
    #[serde(rename_all = "PascalCase")]
    EndGateway { age: i64, exact_teleport: bool },
    #[serde(rename = "minecraft:end_portal")]
    #[serde(rename_all = "PascalCase")]
    EndPortal,
    #[serde(rename = "minecraft:furnace")]
    #[serde(rename_all = "PascalCase")]
    Furnace {
        #[serde(default)]
        items: Vec<InventorySlot>,
        burn_time: i16,
        cook_time: i16,
        cook_time_total: i16,
    },
    #[serde(rename = "minecraft:hopper")]
    #[serde(rename_all = "PascalCase")]
    Hopper {
        #[serde(default)]
        items: Vec<InventorySlot>,
        transfer_cooldown: i32,
    },
    #[serde(rename = "minecraft:jigsaw")]
    #[serde(rename_all = "PascalCase")]
    Jigsaw {
        target_pool: String,
        final_state: String,
        /// Spelled "attachement" on the wiki, but that appears to be a
        /// typo; the corrected spelling is used for this field name.
        attachment_type: String,
    },
    #[serde(rename = "minecraft:jukebox")]
    #[serde(rename_all = "PascalCase")]
    Jukebox {
        #[serde(default)]
        record_item: InventorySlot,
    },
    // TODO: a few more
    /// Fallback type for unknown block entities. Deserialized via
    /// `#[serde(other)]` (payload discarded); serializing it always fails
    /// (see `BlockEntityKind::serialize_unknown`).
    #[serde(other, serialize_with = "BlockEntityKind::serialize_unknown")]
    Unknown,
}
impl BlockEntityKind {
pub(crate) fn serialize_unknown<S: Serializer>(_serializer: S) -> Result<S::Ok, S::Error> {
Err(S::Error::custom("cannot serialize unknown block entities"))
}
pub fn variant(&self) -> BlockEntityVariant {
match self {
BlockEntityKind::Beacon { .. } => BlockEntityVariant::Beacon,
BlockEntityKind::Bed { .. } => BlockEntityVariant::Bed,
BlockEntityKind::BrewingStand { .. } => BlockEntityVariant::BrewingStand,
BlockEntityKind::Cauldron { .. } => BlockEntityVariant::Cauldron,
BlockEntityKind::Comparator { .. } => BlockEntityVariant::Comparator,
BlockEntityKind::CommandBlock { .. } => BlockEntityVariant::CommandBlock,
BlockEntityKind::Chest { .. } => BlockEntityVariant::Chest,
BlockEntityKind::DaylightDetector { .. } => BlockEntityVariant::DaylightDetector,
BlockEntityKind::Dispenser { .. } => BlockEntityVariant::Dispenser,
BlockEntityKind::Dropper { .. } => BlockEntityVariant::Dropper,
BlockEntityKind::EnchantingTable { .. } => BlockEntityVariant::EnchantingTable,
BlockEntityKind::EnderChest { .. } => BlockEntityVariant::EnderChest,
BlockEntityKind::EndGateway { .. } => BlockEntityVariant::EndGateway,
BlockEntityKind::EndPortal { .. } => BlockEntityVariant::EndPortal,
BlockEntityKind::Furnace { .. } => BlockEntityVariant::Furnace,
BlockEntityKind::Hopper { .. } => BlockEntityVariant::Hopper,
BlockEntityKind::Jigsaw { .. } => BlockEntityVariant::Jigsaw,
BlockEntityKind::Jukebox { .. } => BlockEntityVariant::Jukebox,
BlockEntityKind::Unknown { .. } => BlockEntityVariant::Unknown,
}
}
}
/// Variant of a `BlockEntityKind`.
///
/// A data-free discriminant with one entry per `BlockEntityKind` variant,
/// produced by `BlockEntityKind::variant`. Being `Copy + Eq + Hash`, it is
/// suitable as a map key or set element.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum BlockEntityVariant {
    Beacon,
    Bed,
    BrewingStand,
    Cauldron,
    Chest,
    Comparator,
    CommandBlock,
    DaylightDetector,
    Dispenser,
    Dropper,
    EnchantingTable,
    EnderChest,
    EndGateway,
    EndPortal,
    Furnace,
    Hopper,
    Jigsaw,
    Jukebox,
    Unknown,
}
| 32.361386 | 95 | 0.617103 |
67412e1677937b7afea82a624d6f13581bade34b | 4,963 | //! Extensions to `std::process` for Windows.
#![stable(feature = "process_extensions", since = "1.2.0")]
use crate::os::windows::io::{AsRawHandle, FromRawHandle, IntoRawHandle, RawHandle};
use crate::process;
use crate::sealed::Sealed;
use crate::sys;
use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner};
#[stable(feature = "process_extensions", since = "1.2.0")]
impl FromRawHandle for process::Stdio {
    unsafe fn from_raw_handle(handle: RawHandle) -> process::Stdio {
        // Wrap the raw OS handle in the platform-private `Handle` type and
        // tag it as a handle-backed stdio configuration.
        // NOTE(review): this looks like it takes ownership of `handle`
        // (the caller must not close it afterwards) — confirm against the
        // `FromRawHandle` contract.
        let handle = sys::handle::Handle::new(handle as *mut _);
        let io = sys::process::Stdio::Handle(handle);
        process::Stdio::from_inner(io)
    }
}
// Raw-handle accessors for a spawned child and its captured stdio pipes:
// `as_raw_handle` borrows the OS handle (the object keeps ownership), while
// `into_raw_handle` consumes the wrapper and transfers the handle to the
// caller via the platform-private `into_inner()`/`into_handle()` chain.
#[stable(feature = "process_extensions", since = "1.2.0")]
impl AsRawHandle for process::Child {
    fn as_raw_handle(&self) -> RawHandle {
        self.as_inner().handle().raw() as *mut _
    }
}
#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawHandle for process::Child {
    fn into_raw_handle(self) -> RawHandle {
        self.into_inner().into_handle().into_raw() as *mut _
    }
}
#[stable(feature = "process_extensions", since = "1.2.0")]
impl AsRawHandle for process::ChildStdin {
    fn as_raw_handle(&self) -> RawHandle {
        self.as_inner().handle().raw() as *mut _
    }
}
#[stable(feature = "process_extensions", since = "1.2.0")]
impl AsRawHandle for process::ChildStdout {
    fn as_raw_handle(&self) -> RawHandle {
        self.as_inner().handle().raw() as *mut _
    }
}
#[stable(feature = "process_extensions", since = "1.2.0")]
impl AsRawHandle for process::ChildStderr {
    fn as_raw_handle(&self) -> RawHandle {
        self.as_inner().handle().raw() as *mut _
    }
}
#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawHandle for process::ChildStdin {
    fn into_raw_handle(self) -> RawHandle {
        self.into_inner().into_handle().into_raw() as *mut _
    }
}
#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawHandle for process::ChildStdout {
    fn into_raw_handle(self) -> RawHandle {
        self.into_inner().into_handle().into_raw() as *mut _
    }
}
#[stable(feature = "into_raw_os", since = "1.4.0")]
impl IntoRawHandle for process::ChildStderr {
    fn into_raw_handle(self) -> RawHandle {
        self.into_inner().into_handle().into_raw() as *mut _
    }
}
/// Windows-specific extensions to [`process::ExitStatus`].
///
/// This trait is sealed: it cannot be implemented outside the standard library.
/// This is so that future additional methods are not breaking changes.
#[stable(feature = "exit_status_from", since = "1.12.0")]
pub trait ExitStatusExt: Sealed {
    /// Creates a new `ExitStatus` from the raw underlying `u32` return value of
    /// a process.
    #[stable(feature = "exit_status_from", since = "1.12.0")]
    fn from_raw(raw: u32) -> Self;
}
#[stable(feature = "exit_status_from", since = "1.12.0")]
impl ExitStatusExt for process::ExitStatus {
    fn from_raw(raw: u32) -> Self {
        // Convert through the platform-private representation (`From<u32>`),
        // then wrap it in the public `ExitStatus`.
        process::ExitStatus::from_inner(From::from(raw))
    }
}
/// Windows-specific extensions to the [`process::Command`] builder.
///
/// This trait is sealed: it cannot be implemented outside the standard library.
/// This is so that future additional methods are not breaking changes.
#[stable(feature = "windows_process_extensions", since = "1.16.0")]
pub trait CommandExt: Sealed {
    /// Sets the [process creation flags][1] to be passed to `CreateProcess`.
    ///
    /// These will always be ORed with `CREATE_UNICODE_ENVIRONMENT`.
    ///
    /// Returns `&mut self` so calls can be chained like other builder methods.
    ///
    /// [1]: https://docs.microsoft.com/en-us/windows/win32/procthread/process-creation-flags
    #[stable(feature = "windows_process_extensions", since = "1.16.0")]
    fn creation_flags(&mut self, flags: u32) -> &mut process::Command;
    /// Forces all arguments to be wrapped in quote (`"`) characters.
    ///
    /// This is useful for passing arguments to [MSYS2/Cygwin][1] based
    /// executables: these programs will expand unquoted arguments containing
    /// wildcard characters (`?` and `*`) by searching for any file paths
    /// matching the wildcard pattern.
    ///
    /// Adding quotes has no effect when passing arguments to programs
    /// that use [msvcrt][2]. This includes programs built with both
    /// MinGW and MSVC.
    ///
    /// [1]: <https://github.com/msys2/MSYS2-packages/issues/2176>
    /// [2]: <https://msdn.microsoft.com/en-us/library/17w5ykft.aspx>
    #[unstable(feature = "windows_process_extensions_force_quotes", issue = "82227")]
    fn force_quotes(&mut self, enabled: bool) -> &mut process::Command;
}
#[stable(feature = "windows_process_extensions", since = "1.16.0")]
impl CommandExt for process::Command {
    fn creation_flags(&mut self, flags: u32) -> &mut process::Command {
        // Record the flags on the platform-specific builder state, then hand
        // `self` back so calls can be chained.
        let inner = self.as_inner_mut();
        inner.creation_flags(flags);
        self
    }
    fn force_quotes(&mut self, enabled: bool) -> &mut process::Command {
        // Same delegation pattern as `creation_flags`.
        let inner = self.as_inner_mut();
        inner.force_quotes(enabled);
        self
    }
}
| 35.963768 | 93 | 0.665726 |
5de46bb45d8099268a883c88b880c29b09c7504b | 295 | fn main() {
    // Inputs forwarded to the workout generator: a (simulated) value the
    // user specified and a (simulated) random number.
    let simulated_user_specified_value = 10;
    let simulated_random_number = 7;
    // Alternative inputs kept around for exercising other branches:
    //let simulated_user_specified_value = 27;
    // let simulated_random_number = 3;
    cacher::generate_workout(
        simulated_user_specified_value,
        simulated_random_number
    );
}
| 22.692308 | 46 | 0.694915 |
395adce4f0cb161010b675b3ed0900c1b028834f | 6,000 | #![allow(non_camel_case_types)]
#![allow(unused_unsafe)]
#![allow(unused)]
use crate::old::snapshot_0::hostcalls_impl::{ClockEventData, FdEventData};
use crate::old::snapshot_0::memory::*;
use crate::old::snapshot_0::sys::host_impl;
use crate::old::snapshot_0::wasi::{self, WasiError, WasiResult};
use crate::old::snapshot_0::wasi32;
use cpu_time::{ProcessTime, ThreadTime};
use lazy_static::lazy_static;
use std::convert::TryInto;
use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
lazy_static! {
    // Fixed reference instant for the monotonic clock (see `get_monotonic_time`).
    static ref START_MONOTONIC: Instant = Instant::now();
    // Cached performance-counter resolution, in nanoseconds.
    static ref PERF_COUNTER_RES: u64 = get_perf_counter_resolution_ns();
}
// Timer resolution on Windows is really hard. We may consider exposing the resolution of the respective
// timers as an associated function in the future.
pub(crate) fn clock_res_get(
    clock_id: wasi::__wasi_clockid_t,
) -> WasiResult<wasi::__wasi_timestamp_t> {
    Ok(match clock_id {
        // This is the best that we can do with std::time::SystemTime.
        // Rust uses GetSystemTimeAsFileTime, which is said to have the resolution of
        // 10ms or 55ms, [1] but MSDN doesn't confirm this in any way.
        // Even the MSDN article on high resolution timestamps doesn't even mention the precision
        // for this method. [3]
        //
        // The timer resolution can be queried using one of the functions: [2, 5]
        // * NtQueryTimerResolution, which is undocumented and thus not exposed by the winapi crate
        // * timeGetDevCaps, which returns the upper and lower bound for the precision, in ms.
        // While the upper bound seems like something we could use, it's typically too high to be meaningful.
        // For instance, the intervals returned by the syscall are:
        // * [1, 65536] on Wine
        // * [1, 1000000] on Windows 10, which is up to (sic) 1000 seconds.
        //
        // It's possible to manually set the timer resolution, but this sounds like something which should
        // only be done temporarily. [5]
        //
        // Alternatively, we could possibly use GetSystemTimePreciseAsFileTime in clock_time_get, but
        // this syscall is only available starting from Windows 8.
        // (we could possibly emulate it on earlier versions of Windows, see [4])
        // The MSDN are not clear on the resolution of GetSystemTimePreciseAsFileTime either, but a
        // Microsoft devblog entry [1] suggests that it kind of combines GetSystemTimeAsFileTime with
        // QueryPerformanceCounter, which probably means that those two should have the same resolution.
        //
        // See also this discussion [6] about the use of GetSystemTimePreciseAsFileTime in Python stdlib,
        // which in particular contains some resolution benchmarks.
        //
        // [1] https://devblogs.microsoft.com/oldnewthing/20170921-00/?p=97057
        // [2] http://www.windowstimestamp.com/description
        // [3] https://docs.microsoft.com/en-us/windows/win32/sysinfo/acquiring-high-resolution-time-stamps?redirectedfrom=MSDN
        // [4] https://www.codeproject.com/Tips/1011902/High-Resolution-Time-For-Windows
        // [5] https://stackoverflow.com/questions/7685762/windows-7-timing-functions-how-to-use-getsystemtimeadjustment-correctly
        // [6] https://bugs.python.org/issue19007
        wasi::__WASI_CLOCKID_REALTIME => 55_000_000,
        // std::time::Instant uses QueryPerformanceCounter & QueryPerformanceFrequency internally
        wasi::__WASI_CLOCKID_MONOTONIC => *PERF_COUNTER_RES,
        // The best we can do is to hardcode the value from the docs.
        // https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-getprocesstimes
        wasi::__WASI_CLOCKID_PROCESS_CPUTIME_ID => 100,
        // The best we can do is to hardcode the value from the docs.
        // https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-getthreadtimes
        wasi::__WASI_CLOCKID_THREAD_CPUTIME_ID => 100,
        _ => return Err(WasiError::EINVAL),
    })
}
/// Returns the current value of the given clock, in nanoseconds.
///
/// Bug fix: the `REALTIME` and `MONOTONIC` arms were swapped — `REALTIME`
/// called `get_monotonic_time()` and `MONOTONIC` called `get_realtime_time()`.
/// `REALTIME` must report wall-clock time (since the Unix epoch) and
/// `MONOTONIC` the monotonically increasing counter.
///
/// Errors with `EINVAL` for unknown clock ids; conversion of the duration to
/// `__wasi_timestamp_t` nanoseconds may also fail and is propagated.
pub(crate) fn clock_time_get(
    clock_id: wasi::__wasi_clockid_t,
) -> WasiResult<wasi::__wasi_timestamp_t> {
    let duration = match clock_id {
        wasi::__WASI_CLOCKID_REALTIME => get_realtime_time()?,
        wasi::__WASI_CLOCKID_MONOTONIC => get_monotonic_time(),
        wasi::__WASI_CLOCKID_PROCESS_CPUTIME_ID => get_proc_cputime()?,
        wasi::__WASI_CLOCKID_THREAD_CPUTIME_ID => get_thread_cputime()?,
        _ => return Err(WasiError::EINVAL),
    };
    duration.as_nanos().try_into().map_err(Into::into)
}
// Not yet implemented on Windows: any call panics via `unimplemented!`.
// The parameters mirror the Unix implementation's interface.
pub(crate) fn poll_oneoff(
    timeout: Option<ClockEventData>,
    fd_events: Vec<FdEventData>,
    events: &mut Vec<wasi::__wasi_event_t>,
) -> WasiResult<Vec<wasi::__wasi_event_t>> {
    unimplemented!("poll_oneoff")
}
fn get_monotonic_time() -> Duration {
    // We're circumventing the fact that we can't get a Duration from an Instant
    // The epoch of __WASI_CLOCKID_MONOTONIC is undefined, so we fix a time point once
    // and count relative to this time point.
    //
    // The alternative would be to copy over the implementation of std::time::Instant
    // to our source tree and add a conversion to std::time::Duration
    //
    // Note: `START_MONOTONIC` is a lazily initialized static, so the clock's
    // zero point is the first access to it.
    START_MONOTONIC.elapsed()
}
/// Wall-clock time, expressed as the span since the Unix epoch.
/// A system clock set before the epoch is reported as `EFAULT`.
fn get_realtime_time() -> WasiResult<Duration> {
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(since_epoch) => Ok(since_epoch),
        Err(_) => Err(WasiError::EFAULT),
    }
}
// CPU time consumed by the whole process (via `cpu_time::ProcessTime`).
fn get_proc_cputime() -> WasiResult<Duration> {
    Ok(ProcessTime::try_now()?.as_duration())
}
// CPU time consumed by the calling thread (via `cpu_time::ThreadTime`).
fn get_thread_cputime() -> WasiResult<Duration> {
    Ok(ThreadTime::try_now()?.as_duration())
}
/// Resolution of the Windows performance counter, in nanoseconds per tick,
/// derived from `QueryPerformanceFrequency` (ticks per second).
///
/// Cleanup: the result was previously bound to a redundant local
/// (`let epsilon = …; epsilon`, clippy's `let_and_return`); the expression is
/// now returned directly.
fn get_perf_counter_resolution_ns() -> u64 {
    use winx::time::perf_counter_frequency;
    const NANOS_PER_SEC: u64 = 1_000_000_000;
    // This should always succeed starting from Windows XP, so it's fine to panic in case of an error.
    let freq = perf_counter_frequency().expect("QueryPerformanceFrequency returned an error");
    NANOS_PER_SEC / freq
}
| 48.387097 | 130 | 0.704333 |
0829e31ff5d8086d5122a38cc42dd3fc0575cf7a | 512 | use thiserror::Error;
/// Convenience alias: `Result` with [`LeftError`] as the error type.
pub type Result<T> = std::result::Result<T, LeftError>;
/// Crate-wide error enum; `#[from]` variants wrap their underlying sources.
#[derive(Debug, Error)]
pub enum LeftError {
    // TODO move StateSocket away from lib OR use Config::save_state?
    #[error("Parsing error: {0}")]
    SerdeParse(#[from] serde_json::error::Error),
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    // TODO move Nanny to bin
    #[error("XDG error: {0}")]
    XdgBaseDirError(#[from] xdg::BaseDirectoriesError),
    // Payload-free variant; NOTE(review): presumably raised on socket/stream
    // failures — confirm at call sites.
    #[error("Stream error")]
    StreamError,
}
| 28.444444 | 69 | 0.636719 |
39426ee3a7d186f2d4b5e329f80622d7bcaa3d74 | 85 | mod helpers;
use helpers::*;
#[cfg(test)]
mod rename;
#[cfg(test)]
mod rename_all;
| 9.444444 | 15 | 0.658824 |
abaa12bc5c34012d39173c25a2142cdb3e887aa8 | 692 | // Non-copyable types.
// Unit structs that do not derive `Copy`, so passing them by value moves them.
struct Empty;
struct Null;
// A trait generic over `T`.
trait DoubleDrop<T> {
    // Define a method on the caller type which takes an
    // additional single parameter `T` and does nothing with it.
    fn double_drop(self, _: T);
}
// Blanket implementation of `DoubleDrop<T>` for any generic parameter `T` and
// caller `U`: every type gets the method, for every argument type.
impl<T, U> DoubleDrop<T> for U {
    // This method takes ownership of both passed arguments,
    // deallocating both.
    fn double_drop(self, _: T) {}
}
fn main() {
    // Two values of distinct, non-copyable types.
    let first = Empty;
    let second = Null;
    // `double_drop` takes both arguments by value, so this call moves
    // (and therefore deallocates) `first` and `second`.
    first.double_drop(second);
    // Using either value after this point would fail to compile —
    // try referencing them here to see the move errors.
}
| 22.322581 | 64 | 0.635838 |
2253152a7b991ad04dc7d60d211b139620dead09 | 1,055 | use prime_factors::factors;
use prime_factors::alt_implementations::odd_iteration;
use prime_factors::alt_implementations::every_iteration;
// Tests for the main `factors` implementation, from the smallest inputs up
// to composites with large prime factors.
#[test]
fn test_no_factors() {
    assert_eq!(factors(1), vec![]);
}
#[test]
fn test_prime_number() {
    assert_eq!(factors(2), vec![2]);
}
#[test]
fn test_square_of_a_prime() {
    assert_eq!(factors(9), vec![3, 3]);
}
#[test]
fn test_cube_of_a_prime() {
    assert_eq!(factors(8), vec![2, 2, 2]);
}
#[test]
fn test_product_of_primes_and_non_primes() {
    assert_eq!(factors(12), vec![2, 2, 3]);
}
#[test]
fn test_product_of_primes() {
    assert_eq!(factors(901_255), vec![5, 17, 23, 461]);
}
#[test]
fn test_factors_include_large_prime() {
    assert_eq!(factors(93_819_012_551), vec![11, 9539, 894_119]);
}
// The same large-prime case, run against the alternative implementations.
#[test]
fn test_factors_include_large_prime_every_iteration() {
    assert_eq!(every_iteration::factors(93_819_012_551), vec![11, 9539, 894_119]);
}
#[test]
fn test_factors_include_large_prime_odd_iteration() {
    assert_eq!(odd_iteration::factors(93_819_012_551), vec![11, 9539, 894_119]);
} | 21.979167 | 82 | 0.705213 |
e8227d0f82dfcfb85f0e91dd25f83a79752928a6 | 2,064 | //! This module is in charge of audio (both recording and playback).
use anyhow::{Context, Result};
use gstreamer as gst;
use scribl_curves::Time;
use crate::config::AudioInput as InputConfig;
mod appsrc;
mod handle;
mod snippets;
mod thread;
pub use appsrc::create_appsrc;
pub use handle::AudioHandle;
pub use snippets::{TalkSnippet, TalkSnippetId, TalkSnippets};
/// We do all of our internal audio processing at 48kHz.
pub const SAMPLE_RATE: u32 = 48000;
/// All the information needed to specify some audio for playback (or encoding).
#[derive(Clone)]
pub struct OutputData {
    /// The collection of audio snippets. They will be mixed into the final audio output.
    pub snips: TalkSnippets,
    /// The time at which to start playing.
    pub start_time: Time,
    /// The velocity at which to play back the audio. (1.0 is normal, forwards, playback)
    /// A non-positive velocity counts as "not forwards" (see `forwards`).
    pub velocity: f64,
}
/// The result of recording audio: a buffer, and a bit of metadata.
pub struct AudioRecording {
    /// The audio signal.
    /// NOTE(review): presumably i16 samples at `SAMPLE_RATE` — confirm
    /// channel layout against the recording pipeline.
    pub buf: Vec<i16>,
    /// The perceptual loudness (in dB) of the audio.
    pub loudness: f64,
    /// The peak (as a number in [0.0, 1.0]) of the signal.
    pub peak: f64,
}
/// These status messages are sent periodically from the audio thread to the main thread.
#[derive(Clone)]
pub struct AudioRecordingStatus {
    /// The perceptual loudness (in dB) of some recent chunk of audio input.
    pub loudness: f32,
    /// The estimated probability that the input is speech.
    pub vad: f32,
}
impl OutputData {
    /// A default playback description: no snippets, starting at time zero,
    /// playing forwards at normal speed.
    fn new() -> OutputData {
        OutputData {
            velocity: 1.0,
            start_time: Time::ZERO,
            snips: TalkSnippets::default(),
        }
    }
    /// True when playback runs forwards, i.e. the velocity is strictly positive.
    fn forwards(&self) -> bool {
        self.velocity > 0.0
    }
}
/// Creates a gstreamer element of type `kind` named `name`, attaching a hint
/// about missing plugins to any failure (the usual cause).
fn create_gst_elt(kind: &str, name: &str) -> Result<gst::Element> {
    let made = gst::ElementFactory::make(kind, Some(name));
    made.with_context(|| {
        format!(
            "tried to create {}, of type {}. You are probably missing a gstreamer plugin",
            name, kind
        )
    })
}
| 27.891892 | 90 | 0.656977 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.