file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
ptr.rs | use crate::source::snippet;
use crate::{get_pat_name, match_var};
use rustc_hir::intravisit::{walk_expr, NestedVisitorMap, Visitor};
use rustc_hir::{Body, BodyId, Expr, ExprKind, Param};
use rustc_lint::LateContext;
use rustc_middle::hir::map::Map;
use rustc_span::{Span, Symbol};
use std::borrow::Cow;
pub fn get_spans(
cx: &LateContext<'_>,
opt_body_id: Option<BodyId>,
idx: usize,
replacements: &[(&'static str, &'static str)],
) -> Option<Vec<(Span, Cow<'static, str>)>> {
if let Some(body) = opt_body_id.map(|id| cx.tcx.hir().body(id)) {
get_binding_name(&body.params[idx]).map_or_else(
|| Some(vec![]),
|name| extract_clone_suggestions(cx, name, replacements, body),
)
} else {
Some(vec![])
}
}
fn extract_clone_suggestions<'tcx>(
cx: &LateContext<'tcx>,
name: Symbol,
replace: &[(&'static str, &'static str)],
body: &'tcx Body<'_>,
) -> Option<Vec<(Span, Cow<'static, str>)>> {
let mut visitor = PtrCloneVisitor {
cx,
name, | };
visitor.visit_body(body);
if visitor.abort { None } else { Some(visitor.spans) }
}
struct PtrCloneVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
name: Symbol,
replace: &'a [(&'static str, &'static str)],
spans: Vec<(Span, Cow<'static, str>)>,
abort: bool,
}
impl<'a, 'tcx> Visitor<'tcx> for PtrCloneVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
if self.abort {
return;
}
if let ExprKind::MethodCall(seg, _, args, _) = expr.kind {
if args.len() == 1 && match_var(&args[0], self.name) {
if seg.ident.name.as_str() == "capacity" {
self.abort = true;
return;
}
for &(fn_name, suffix) in self.replace {
if seg.ident.name.as_str() == fn_name {
self.spans
.push((expr.span, snippet(self.cx, args[0].span, "_") + suffix));
return;
}
}
}
}
walk_expr(self, expr);
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
}
fn get_binding_name(arg: &Param<'_>) -> Option<Symbol> {
get_pat_name(arg.pat)
} | replace,
spans: vec![],
abort: false, |
staker.rs | use beserial::{Deserialize, Serialize};
use nimiq_database::WriteTransaction;
use nimiq_keys::Address;
use nimiq_primitives::coin::Coin;
use crate::staking_contract::receipts::StakerReceipt;
use crate::{Account, AccountError, AccountsTrie, StakingContract};
/// Struct representing a staker in the staking contract.
/// Actions concerning a staker are:
/// 1. Create: Creates a staker.
/// 2. Stake: Adds coins from any outside address to a staker's balance.
/// 3. Update: Updates the validator.
/// 4. Unstake: Removes coins from a staker's balance to outside the staking contract.
///
/// Create, Stake and Update are incoming transactions to the staking contract.
/// Unstake is an outgoing transaction from the staking contract.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Staker {
// The address of the staker. The corresponding key is used for all transactions (except Stake
// which is open to any address).
pub address: Address,
// The staker's balance.
pub balance: Coin,
// The address of the validator for which the staker is delegating its stake for. If it is not
// delegating to any validator, this will be set to None.
pub delegation: Option<Address>,
}
impl StakingContract {
/// Creates a new staker. This function is public to fill the genesis staking contract.
pub fn create_staker(
accounts_tree: &AccountsTrie,
db_txn: &mut WriteTransaction,
staker_address: &Address,
value: Coin,
delegation: Option<Address>,
) -> Result<(), AccountError> {
// See if the staker already exists.
if StakingContract::get_staker(accounts_tree, db_txn, staker_address).is_some() {
return Err(AccountError::AlreadyExistentAddress {
address: staker_address.clone(),
});
}
// Get the staking contract and update it.
let mut staking_contract = StakingContract::get_staking_contract(accounts_tree, db_txn);
staking_contract.balance = Account::balance_add(staking_contract.balance, value)?;
// Create the staker struct.
let staker = Staker {
address: staker_address.clone(),
balance: value,
delegation: delegation.clone(),
};
// If we are staking for a validator, we need to update it.
if let Some(validator_address) = delegation {
// Get the validator.
let mut validator =
match StakingContract::get_validator(accounts_tree, db_txn, &validator_address) {
Some(v) => v,
None => {
return Err(AccountError::NonExistentAddress {
address: validator_address.clone(),
});
}
};
// Update it.
validator.balance = Account::balance_add(validator.balance, value)?;
if validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(validator_address.clone(), validator.balance);
}
validator.num_stakers += 1;
// All checks passed, not allowed to fail from here on!
// Re-add the validator entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(&validator_address),
Account::StakingValidator(validator),
);
// Add the staker entry to the validator.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator_staker(&validator_address, staker_address),
Account::StakingValidatorsStaker(staker_address.clone()),
);
}
// Add the staking contract and the staker entries.
accounts_tree.put(
db_txn,
&StakingContract::get_key_staking_contract(),
Account::Staking(staking_contract),
);
accounts_tree.put(
db_txn,
&StakingContract::get_key_staker(staker_address),
Account::StakingStaker(staker),
);
Ok(())
}
/// Reverts a create staker transaction.
pub(crate) fn revert_create_staker(
accounts_tree: &AccountsTrie,
db_txn: &mut WriteTransaction,
staker_address: &Address,
value: Coin,
) -> Result<(), AccountError> {
// Get the staker and check if it exists.
let staker = match StakingContract::get_staker(accounts_tree, db_txn, staker_address) {
None => {
return Err(AccountError::NonExistentAddress {
address: staker_address.clone(),
});
}
Some(x) => x,
};
// If the transaction value is less than the staker's balance, this means that the original
// `create_staker` transaction failed and got downgraded to a `stake` transaction.
// In this case we simply revert the `stake` transaction.
debug_assert!(value <= staker.balance);
if value < staker.balance {
return StakingContract::revert_stake(accounts_tree, db_txn, staker_address, value);
}
// Get the staking contract main and update it.
let mut staking_contract = StakingContract::get_staking_contract(accounts_tree, db_txn);
staking_contract.balance = Account::balance_sub(staking_contract.balance, staker.balance)?;
// If we are staking for a validator, we need to update it.
if let Some(validator_address) = staker.delegation {
// Get the validator.
let mut validator =
match StakingContract::get_validator(accounts_tree, db_txn, &validator_address) {
Some(v) => v,
None => {
return Err(AccountError::NonExistentAddress {
address: validator_address.clone(),
});
}
};
// Update it.
validator.balance = Account::balance_sub(validator.balance, staker.balance)?;
if validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(validator_address.clone(), validator.balance);
}
validator.num_stakers -= 1;
// All checks passed, not allowed to fail from here on!
// Re-add the validator entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(&validator_address),
Account::StakingValidator(validator),
);
// Remove the staker entry from the validator.
accounts_tree.remove(
db_txn,
&StakingContract::get_key_validator_staker(&validator_address, staker_address),
);
}
// Add the staking contract entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_staking_contract(),
Account::Staking(staking_contract),
);
// Remove the staker entry.
accounts_tree.remove(db_txn, &StakingContract::get_key_staker(staker_address));
Ok(())
}
/// Adds stake to a staker. It will be directly added to the staker's balance. Anyone can
/// stake for a staker.
/// If a staker at the address doesn't exist, one will be created.
pub(crate) fn stake(
accounts_tree: &AccountsTrie,
db_txn: &mut WriteTransaction,
staker_address: &Address,
value: Coin,
) -> Result<(), AccountError> {
// Get the staker and check if it exists.
let mut staker = match StakingContract::get_staker(accounts_tree, db_txn, staker_address) {
None => {
error!("Couldn't find the staker to which a stake transaction was destined. Plan B: Create a new staker at this address!");
Staker {
address: staker_address.clone(),
balance: Coin::ZERO,
delegation: None,
}
}
Some(x) => x,
};
// Update the balance.
staker.balance = Account::balance_add(staker.balance, value)?;
// Get the staking contract main and update it.
let mut staking_contract = StakingContract::get_staking_contract(accounts_tree, db_txn);
staking_contract.balance = Account::balance_add(staking_contract.balance, value)?;
// If we are staking for a validator, we need to update it too.
if let Some(validator_address) = &staker.delegation {
// Get the validator.
let mut validator =
match StakingContract::get_validator(accounts_tree, db_txn, validator_address) {
Some(v) => v,
None => {
return Err(AccountError::NonExistentAddress {
address: validator_address.clone(),
});
}
};
// Update it.
validator.balance = Account::balance_add(validator.balance, value)?;
if validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(validator_address.clone(), validator.balance);
}
// All checks passed, not allowed to fail from here on!
// Re-add the validator entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(validator_address),
Account::StakingValidator(validator),
);
}
// Add the staking contract and the staker entries.
accounts_tree.put(
db_txn,
&StakingContract::get_key_staking_contract(),
Account::Staking(staking_contract),
);
accounts_tree.put(
db_txn,
&StakingContract::get_key_staker(staker_address),
Account::StakingStaker(staker),
);
Ok(())
}
/// Reverts a stake transaction.
pub(crate) fn revert_stake(
accounts_tree: &AccountsTrie,
db_txn: &mut WriteTransaction,
staker_address: &Address,
value: Coin,
) -> Result<(), AccountError> {
// Get the staker, check if it exists and update it.
let mut staker = match StakingContract::get_staker(accounts_tree, db_txn, staker_address) {
None => {
return Err(AccountError::NonExistentAddress {
address: staker_address.clone(),
});
}
Some(x) => x,
};
staker.balance = Account::balance_sub(staker.balance, value)?;
// Get the staking contract main and update it.
let mut staking_contract = StakingContract::get_staking_contract(accounts_tree, db_txn);
staking_contract.balance = Account::balance_sub(staking_contract.balance, value)?;
// If we are staking for a validator, we need to update it too.
if let Some(validator_address) = &staker.delegation {
// Get the validator.
let mut validator =
match StakingContract::get_validator(accounts_tree, db_txn, validator_address) {
Some(v) => v,
None => {
return Err(AccountError::NonExistentAddress {
address: validator_address.clone(),
});
}
};
// Update it.
validator.balance = Account::balance_sub(validator.balance, value)?;
if validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(validator_address.clone(), validator.balance);
}
// All checks passed, not allowed to fail from here on!
// Re-add the validator entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(validator_address),
Account::StakingValidator(validator),
);
}
// Add the staking contract entries.
accounts_tree.put(
db_txn,
&StakingContract::get_key_staking_contract(),
Account::Staking(staking_contract),
);
// Add or remove the staker entry, depending on remaining balance.
if staker.balance.is_zero() {
accounts_tree.remove(db_txn, &StakingContract::get_key_staker(staker_address));
} else {
accounts_tree.put(
db_txn,
&StakingContract::get_key_staker(staker_address),
Account::StakingStaker(staker),
);
}
Ok(())
}
/// Updates the staker details. Right now you can only update the delegation.
pub(crate) fn update_staker(
accounts_tree: &AccountsTrie,
db_txn: &mut WriteTransaction,
staker_address: &Address,
delegation: Option<Address>,
) -> Result<StakerReceipt, AccountError> {
// Get the staker and check if it exists.
let mut staker = match StakingContract::get_staker(accounts_tree, db_txn, staker_address) {
None => {
error!("Tried to update a staker that doesn't exist!");
return Ok(StakerReceipt {
no_op: true,
delegation: None,
});
}
Some(x) => x,
};
// Get the staking contract main.
let mut staking_contract = StakingContract::get_staking_contract(accounts_tree, db_txn);
// Check that the validator from the new delegation exists.
if let Some(new_validator_address) = &delegation {
if StakingContract::get_validator(accounts_tree, db_txn, new_validator_address)
.is_none()
{
error!("Tried to delegate to a validator that doesn't exist!");
return Ok(StakerReceipt {
no_op: true,
delegation: None,
});
}
}
// All checks passed, not allowed to fail from here on!
// Create the receipt.
let receipt = StakerReceipt {
no_op: false,
delegation: staker.delegation.clone(),
};
// If we were staking for a validator, we remove ourselves from it.
if let Some(old_validator_address) = &staker.delegation {
// Get the validator.
let mut old_validator =
StakingContract::get_validator(accounts_tree, db_txn, old_validator_address)
.unwrap();
// Update it.
old_validator.balance = Account::balance_sub(old_validator.balance, staker.balance)?;
if old_validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(old_validator_address.clone(), old_validator.balance);
}
old_validator.num_stakers -= 1;
// Re-add the validator entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(old_validator_address),
Account::StakingValidator(old_validator),
);
// Remove the staker entry from the validator.
accounts_tree.remove(
db_txn,
&StakingContract::get_key_validator_staker(old_validator_address, staker_address),
);
}
// If we are now staking for a validator, we add ourselves to it.
if let Some(new_validator_address) = &delegation {
// Get the validator.
let mut new_validator =
StakingContract::get_validator(accounts_tree, db_txn, new_validator_address)
.unwrap();
// Update it.
new_validator.balance = Account::balance_add(new_validator.balance, staker.balance)?;
if new_validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(new_validator_address.clone(), new_validator.balance);
}
new_validator.num_stakers += 1;
// Re-add the validator entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(new_validator_address),
Account::StakingValidator(new_validator),
);
// Add the staker entry to the validator.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator_staker(new_validator_address, staker_address),
Account::StakingValidatorsStaker(staker_address.clone()),
);
}
// Update the staker and re-add it to the accounts tree.
staker.delegation = delegation;
accounts_tree.put(
db_txn,
&StakingContract::get_key_staker(staker_address),
Account::StakingStaker(staker),
);
// Save the staking contract.
accounts_tree.put(
db_txn,
&StakingContract::get_key_staking_contract(),
Account::Staking(staking_contract),
);
Ok(receipt)
}
/// Reverts updating staker details.
pub(crate) fn revert_update_staker(
accounts_tree: &AccountsTrie,
db_txn: &mut WriteTransaction,
staker_address: &Address,
receipt: StakerReceipt,
) -> Result<(), AccountError> |
/// Removes coins from a staker's balance. If the entire staker's balance is unstaked then the
/// staker is deleted.
pub(crate) fn unstake(
accounts_tree: &AccountsTrie,
db_txn: &mut WriteTransaction,
staker_address: &Address,
value: Coin,
) -> Result<Option<StakerReceipt>, AccountError> {
// Get the staking contract.
let mut staking_contract = StakingContract::get_staking_contract(accounts_tree, db_txn);
// Get the staker and check if it exists.
let mut staker = match StakingContract::get_staker(accounts_tree, db_txn, staker_address) {
None => {
return Err(AccountError::NonExistentAddress {
address: staker_address.clone(),
});
}
Some(x) => x,
};
// Update the staker.
staker.balance = Account::balance_sub(staker.balance, value)?;
// All checks passed, not allowed to fail from here on!
// If we are staking for a validator, we update it.
if let Some(validator_address) = &staker.delegation {
// Get the validator.
let mut validator =
match StakingContract::get_validator(accounts_tree, db_txn, validator_address) {
Some(v) => v,
None => {
return Err(AccountError::NonExistentAddress {
address: validator_address.clone(),
});
}
};
// Update it.
validator.balance = Account::balance_sub(validator.balance, value)?;
if validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(validator_address.clone(), validator.balance);
}
// If the staker balance is depleted, we have some extra updates for the validator.
if staker.balance.is_zero() {
validator.num_stakers -= 1;
// Remove the staker address from the validator.
accounts_tree.remove(
db_txn,
&StakingContract::get_key_validator_staker(validator_address, &staker.address),
);
}
// Re-add the validator entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(validator_address),
Account::StakingValidator(validator),
);
}
// Update the staking contract.
staking_contract.balance = Account::balance_sub(staking_contract.balance, value)?;
accounts_tree.put(
db_txn,
&StakingContract::get_key_staking_contract(),
Account::Staking(staking_contract),
);
// Re-add or remove the staker entry, depending on remaining balance.
if staker.balance.is_zero() {
accounts_tree.remove(db_txn, &StakingContract::get_key_staker(&staker.address));
Ok(Some(StakerReceipt {
no_op: false,
delegation: staker.delegation,
}))
} else {
accounts_tree.put(
db_txn,
&StakingContract::get_key_staker(staker_address),
Account::StakingStaker(staker),
);
Ok(None)
}
}
/// Reverts a unstake transaction.
pub(crate) fn revert_unstake(
accounts_tree: &AccountsTrie,
db_txn: &mut WriteTransaction,
staker_address: &Address,
value: Coin,
receipt_opt: Option<StakerReceipt>,
) -> Result<(), AccountError> {
let mut staking_contract = StakingContract::get_staking_contract(accounts_tree, db_txn);
let staker = match receipt_opt {
Some(receipt) => {
if let Some(validator_address) = &receipt.delegation {
let mut validator = match StakingContract::get_validator(
accounts_tree,
db_txn,
validator_address,
) {
Some(v) => v,
None => {
return Err(AccountError::NonExistentAddress {
address: validator_address.clone(),
});
}
};
validator.balance = Account::balance_add(validator.balance, value)?;
validator.num_stakers += 1;
if validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(validator_address.clone(), validator.balance);
}
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(validator_address),
Account::StakingValidator(validator),
);
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator_staker(
validator_address,
staker_address,
),
Account::StakingValidatorsStaker(staker_address.clone()),
);
}
Staker {
address: staker_address.clone(),
balance: value,
delegation: receipt.delegation,
}
}
None => {
let mut staker = StakingContract::get_staker(accounts_tree, db_txn, staker_address)
.ok_or(AccountError::NonExistentAddress {
address: staker_address.clone(),
})?;
staker.balance = Account::balance_add(staker.balance, value)?;
staker
}
};
accounts_tree.put(
db_txn,
&StakingContract::get_key_staker(staker_address),
Account::StakingStaker(staker),
);
staking_contract.balance = Account::balance_add(staking_contract.balance, value)?;
accounts_tree.put(
db_txn,
&StakingContract::get_key_staking_contract(),
Account::Staking(staking_contract),
);
Ok(())
}
}
| {
// If it was a no-op, we end right here.
if receipt.no_op {
return Ok(());
}
// Get the staking contract main.
let mut staking_contract = StakingContract::get_staking_contract(accounts_tree, db_txn);
// Get the staker and check if it exists.
let mut staker = match StakingContract::get_staker(accounts_tree, db_txn, staker_address) {
None => {
return Err(AccountError::NonExistentAddress {
address: staker_address.clone(),
});
}
Some(x) => x,
};
// Remove ourselves from the current delegation, if it exists.
if let Some(new_validator_address) = staker.delegation {
// Get the validator.
let mut new_validator =
match StakingContract::get_validator(accounts_tree, db_txn, &new_validator_address)
{
Some(v) => v,
None => {
return Err(AccountError::NonExistentAddress {
address: new_validator_address.clone(),
});
}
};
// Update it.
new_validator.balance = Account::balance_sub(new_validator.balance, staker.balance)?;
if new_validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(new_validator_address.clone(), new_validator.balance);
}
new_validator.num_stakers -= 1;
// Re-add the validator entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(&new_validator_address),
Account::StakingValidator(new_validator),
);
// Remove the staker entry from the validator.
accounts_tree.remove(
db_txn,
&StakingContract::get_key_validator_staker(&new_validator_address, staker_address),
);
}
// Add ourselves to the previous delegation, if it existed.
if let Some(old_validator_address) = receipt.delegation.clone() {
// Get the validator.
let mut old_validator =
match StakingContract::get_validator(accounts_tree, db_txn, &old_validator_address)
{
Some(v) => v,
None => {
return Err(AccountError::NonExistentAddress {
address: old_validator_address.clone(),
});
}
};
// Update it.
old_validator.balance = Account::balance_add(old_validator.balance, staker.balance)?;
if old_validator.inactivity_flag.is_none() {
staking_contract
.active_validators
.insert(old_validator_address.clone(), old_validator.balance);
}
old_validator.num_stakers += 1;
// Re-add the validator entry.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator(&old_validator_address),
Account::StakingValidator(old_validator),
);
// Add the staker entry to the validator.
accounts_tree.put(
db_txn,
&StakingContract::get_key_validator_staker(&old_validator_address, staker_address),
Account::StakingValidatorsStaker(staker_address.clone()),
);
}
// Update the staker and re-add it to the accounts tree.
staker.delegation = receipt.delegation;
accounts_tree.put(
db_txn,
&StakingContract::get_key_staker(staker_address),
Account::StakingStaker(staker),
);
// Save the staking contract.
accounts_tree.put(
db_txn,
&StakingContract::get_key_staking_contract(),
Account::Staking(staking_contract),
);
Ok(())
} |
serialize.rs | use crate::{
attributes::{parse_attributes, Attributes},
util::{add_bounds, strip_raw},
with::{make_with_cast, make_with_ty},
};
use proc_macro2::TokenStream;
use quote::quote;
use syn::{
parse_quote, punctuated::Punctuated, spanned::Spanned, Data, DeriveInput, Error, Fields,
Generics, Ident, Index,
};
pub fn derive(input: DeriveInput) -> Result<TokenStream, Error> {
let attributes = parse_attributes(&input)?;
derive_serialize_impl(input, &attributes)
}
fn | (
mut input: DeriveInput,
attributes: &Attributes,
) -> Result<TokenStream, Error> {
let where_clause = input.generics.make_where_clause();
if let Some(ref bounds) = attributes.archive_bound {
add_bounds(bounds, where_clause)?;
}
if let Some(ref bounds) = attributes.serialize_bound {
add_bounds(bounds, where_clause)?;
}
let mut impl_input_params = Punctuated::default();
impl_input_params.push(parse_quote! { __S: Fallible + ?Sized });
for param in input.generics.params.iter() {
impl_input_params.push(param.clone());
}
let impl_input_generics = Generics {
lt_token: Some(Default::default()),
params: impl_input_params,
gt_token: Some(Default::default()),
where_clause: input.generics.where_clause.clone(),
};
let default_rkyv_path = parse_quote! { ::rkyv };
let rkyv_path = attributes.rkyv_path.as_ref().unwrap_or(&default_rkyv_path);
let with_ty = make_with_ty(rkyv_path);
let with_cast = make_with_cast(rkyv_path);
let name = &input.ident;
let (impl_generics, _, _) = impl_input_generics.split_for_impl();
let (_, ty_generics, where_clause) = input.generics.split_for_impl();
let where_clause = where_clause.unwrap();
let resolver = attributes.resolver.as_ref().map_or_else(
|| Ident::new(&format!("{}Resolver", strip_raw(name)), name.span()),
|value| value.clone(),
);
let serialize_impl = match input.data {
Data::Struct(ref data) => match data.fields {
Fields::Named(ref fields) => {
let mut serialize_where = where_clause.clone();
for field in fields
.named
.iter()
.filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")))
{
let ty = with_ty(field)?;
serialize_where
.predicates
.push(parse_quote! { #ty: Serialize<__S> });
}
let resolver_values = fields.named.iter().map(|f| {
let name = &f.ident;
let field = with_cast(f, parse_quote! { &self.#name }).unwrap();
quote! { #name: Serialize::<__S>::serialize(#field, serializer)? }
});
quote! {
impl #impl_generics Serialize<__S> for #name #ty_generics #serialize_where {
#[inline]
fn serialize(&self, serializer: &mut __S) -> ::core::result::Result<Self::Resolver, __S::Error> {
Ok(#resolver {
#(#resolver_values,)*
})
}
}
}
}
Fields::Unnamed(ref fields) => {
let mut serialize_where = where_clause.clone();
for field in fields
.unnamed
.iter()
.filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")))
{
let ty = with_ty(field)?;
serialize_where
.predicates
.push(parse_quote! { #ty: Serialize<__S> });
}
let resolver_values = fields.unnamed.iter().enumerate().map(|(i, f)| {
let index = Index::from(i);
let field = with_cast(f, parse_quote! { &self.#index }).unwrap();
quote! { Serialize::<__S>::serialize(#field, serializer)? }
});
quote! {
impl #impl_generics Serialize<__S> for #name #ty_generics #serialize_where {
#[inline]
fn serialize(&self, serializer: &mut __S) -> ::core::result::Result<Self::Resolver, __S::Error> {
Ok(#resolver(
#(#resolver_values,)*
))
}
}
}
}
Fields::Unit => {
quote! {
impl #impl_generics Serialize<__S> for #name #ty_generics #where_clause {
#[inline]
fn serialize(&self, serializer: &mut __S) -> ::core::result::Result<Self::Resolver, __S::Error> {
Ok(#resolver)
}
}
}
}
},
Data::Enum(ref data) => {
let mut serialize_where = where_clause.clone();
for variant in data.variants.iter() {
match variant.fields {
Fields::Named(ref fields) => {
for field in fields
.named
.iter()
.filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")))
{
let ty = with_ty(field)?;
serialize_where
.predicates
.push(parse_quote! { #ty: Serialize<__S> });
}
}
Fields::Unnamed(ref fields) => {
for field in fields
.unnamed
.iter()
.filter(|f| !f.attrs.iter().any(|a| a.path.is_ident("omit_bounds")))
{
let ty = with_ty(field)?;
serialize_where
.predicates
.push(parse_quote! { #ty: Serialize<__S> });
}
}
Fields::Unit => (),
}
}
let serialize_arms = data.variants.iter().map(|v| {
let variant = &v.ident;
match v.fields {
Fields::Named(ref fields) => {
let bindings = fields.named.iter().map(|f| {
let name = &f.ident;
quote! { #name }
});
let fields = fields.named.iter().map(|f| {
let name = &f.ident;
let field = with_cast(f, parse_quote! { #name }).unwrap();
quote! {
#name: Serialize::<__S>::serialize(#field, serializer)?
}
});
quote! {
Self::#variant { #(#bindings,)* } => #resolver::#variant {
#(#fields,)*
}
}
}
Fields::Unnamed(ref fields) => {
let bindings = fields.unnamed.iter().enumerate().map(|(i, f)| {
let name = Ident::new(&format!("_{}", i), f.span());
quote! { #name }
});
let fields = fields.unnamed.iter().enumerate().map(|(i, f)| {
let binding = Ident::new(&format!("_{}", i), f.span());
let field = with_cast(f, parse_quote! { #binding }).unwrap();
quote! {
Serialize::<__S>::serialize(#field, serializer)?
}
});
quote! {
Self::#variant( #(#bindings,)* ) => #resolver::#variant(#(#fields,)*)
}
}
Fields::Unit => {
quote! { Self::#variant => #resolver::#variant }
}
}
});
quote! {
impl #impl_generics Serialize<__S> for #name #ty_generics #serialize_where {
#[inline]
fn serialize(&self, serializer: &mut __S) -> ::core::result::Result<Self::Resolver, __S::Error> {
Ok(match self {
#(#serialize_arms,)*
})
}
}
}
}
Data::Union(_) => {
return Err(Error::new_spanned(
input,
"Serialize cannot be derived for unions",
))
}
};
Ok(quote! {
#[automatically_derived]
const _: () = {
use #rkyv_path::{Archive, Fallible, Serialize};
#serialize_impl
};
})
}
| derive_serialize_impl |
0003_reviews.py | # Generated by Django 2.2.11 on 2020-04-01 18:44
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
| dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('products', '0002_auto_20200314_0741'),
]
operations = [
migrations.CreateModel(
name='Reviews',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.IntegerField(choices=[(1, 'Poor'), (2, 'Below Average'), (3, 'Average'), (4, 'Better than Average'), (5, 'Excellent')], default=3)),
('review', models.TextField()),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', to='products.Product')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
] |
|
link.ts | //
// Copyright (c) Microsoft.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
import { Router } from 'express';
import asyncHandler from 'express-async-handler';
const router: Router = Router();
import { ReposAppRequest, IAppSession, SupportedLinkType, ICorporateLink, LinkOperationSource } from '../interfaces';
import { getProviders, splitSemiColonCommas } from '../transitional';
import { IndividualContext } from '../user';
import { storeOriginalUrlAsReferrer, wrapError } from '../utils';
import validator from 'validator';
import unlinkRoute from './unlink';
import { jsonError } from '../middleware';
interface IRequestWithSession extends ReposAppRequest {
session: IAppSession;
}
interface IRequestHacked extends ReposAppRequest {
overrideLinkUserPrincipalName?: any;
}
router.use((req: IRequestHacked, res, next) => {
const config = getProviders(req).config;;
if (config && config.github && config.github.links && config.github.links.provider && config.github.links.provider.linkingOfflineMessage) {
return next(new Error(`Linking is temporarily offline: ${config.github.links.provider.linkingOfflineMessage}`));
} else {
return next();
}
});
router.use('/', asyncHandler(async function (req: ReposAppRequest, res, next) {
// Make sure both account types are authenticated before showing the link pg [wi 12690]
const individualContext = req.individualContext;
if (!individualContext.corporateIdentity || !individualContext.getGitHubIdentity()) {
req.insights.trackEvent({ name: 'PortalSessionNeedsBothGitHubAndAadUsernames' });
return res.redirect('/?signin');
}
return next();
}));
// TODO: graph provider non-guest check should be middleware and in the link business process
router.use(asyncHandler(async (req: IRequestHacked, res, next) => {
const individualContext = req.individualContext as IndividualContext;
const providers = getProviders(req);
const insights = providers.insights;
const config = providers.config;
let validateAndBlockGuests = false;
if (config && config.activeDirectory && config.activeDirectory.blockGuestUserTypes) {
validateAndBlockGuests = true;
}
// If the app has not been configured to check whether a user is a guest before linking, continue:
if (!validateAndBlockGuests) {
return next();
}
const aadId = individualContext.corporateIdentity.id;
// If the app is configured to check guest status, do this now, before linking:
const graphProvider = providers.graphProvider;
// REFACTOR: delegate the decision to the auth provider
if (!graphProvider || !graphProvider.getUserById) {
return next(new Error('User type validation cannot be performed because there is no graphProvider configured for this type of account'));
}
insights.trackEvent({
name: 'LinkValidateNotGuestStart',
properties: {
aadId: aadId,
},
});
try {
const details = await graphProvider.getUserById(aadId);
const userType = details.userType;
const displayName = details.displayName;
const userPrincipalName = details.userPrincipalName;
let block = userType as string === 'Guest';
let blockedRecord = block ? 'BLOCKED' : 'not blocked';
// If the app is configured to check for guests, but this is a specifically permitted guest user, continue:
if (config?.activeDirectoryGuests) {
const authorizedGuests = Array.isArray(config.activeDirectoryGuests) ? config.activeDirectoryGuests as string[] : splitSemiColonCommas(config.activeDirectoryGuests);
if (!authorizedGuests.includes(aadId)) {
block = false;
blockedRecord = 'specifically authorized user ' + aadId + ' ' + userPrincipalName;
req.overrideLinkUserPrincipalName = userPrincipalName;
return next(new Error('This feature is not currently available. Please reach out to support to re-enable this feature.'));
}
}
insights.trackEvent({
name: 'LinkValidateNotGuestGraphSuccess',
properties: {
aadId: aadId,
userType: userType,
displayName: displayName,
userPrincipalName: userPrincipalName,
blocked: blockedRecord,
},
});
if (block) {
insights.trackMetric({ name: 'LinksBlockedForGuests', value: 1 });
return next(new Error(`This system is not available to guests. You are currently signed in as ${displayName} ${userPrincipalName}. Please sign out or try a private browser window.`));
}
const manager = await providers.graphProvider.getManagerById(aadId);
if (!manager || !manager.userPrincipalName) {
throw new Error(`You do not have an active manager entry in the directory and so cannot yet link.`);
}
return next();
} catch (graphError) {
insights.trackException({
exception: graphError,
properties: {
aadId: aadId,
name: 'LinkValidateNotGuestGraphFailure',
},
});
return next(graphError);
}
}));
router.get('/', asyncHandler(async function (req: ReposAppRequest, res, next) {
const individualContext = req.individualContext;
const link = individualContext.link;
if (!individualContext.corporateIdentity && !individualContext.getGitHubIdentity()) {
req.insights.trackEvent({ name: 'PortalSessionNeedsBothGitHubAndAadUsernames' });
return res.redirect('/?signin');
}
if (!individualContext.getGitHubIdentity()) {
req.insights.trackEvent({ name: 'PortalSessionNeedsGitHubUsername' });
return res.redirect('/signin/github/');
}
if (!link) {
return await showLinkPage(req, res);
} else {
req.insights.trackEvent({ name: 'LinkRouteLinkLocated' });
let organizations = null;
try {
organizations = await individualContext.aggregations.organizations();
} catch (ignoredError) {
/* ignore */
}
return individualContext.webContext.render({
view: 'linkConfirmed',
title: 'You\'re already linked',
state: {
organizations,
}
});
}
}));
async function showLinkPage(req: ReposAppRequest, res) {
const individualContext = req.individualContext as IndividualContext;
function | (options) {
individualContext.webContext.render({
view: 'link',
title: 'Link GitHub with corporate identity',
optionalObject: options || {},
});
}
const { config, graphProvider } = getProviders(req);
if (config.authentication.scheme !== 'aad' || !graphProvider) {
return render(null);
}
const aadId = individualContext.corporateIdentity.id;
const { operations } = getProviders(req);
// By design, we want to log the errors but do not want any individual
// lookup problem to break the underlying experience of letting a user
// link. This is important if someone is new in the company, they may
// not be in the graph fully yet.
const userLinkData = await operations.validateCorporateAccountCanLink(aadId);
render({
graphUser: userLinkData.graphEntry,
isServiceAccountCandidate: userLinkData.type === SupportedLinkType.ServiceAccount,
});
}
router.get('/enableMultipleAccounts', function (req: IRequestWithSession, res) {
// LEGACY
// TODO: is this code still ever really used?
if (req.user.github) {
req.session.enableMultipleAccounts = true;
return res.redirect('/link/cleanup');
}
req.insights.trackEvent({ name: 'PortalUserEnabledMultipleAccounts' });
storeOriginalUrlAsReferrer(req, res, '/auth/github', 'multiple accounts enabled need to auth with GitHub again now');
});
router.post('/', asyncHandler(async (req: ReposAppRequest, res, next) => {
const individualContext = req.individualContext as IndividualContext;
try {
await interactiveLinkUser(false, individualContext, req, res, next);
} catch (error) {
return next(error);
}
}));
export async function interactiveLinkUser(isJson: boolean, individualContext: IndividualContext, req, res, next) {
const isServiceAccount = req.body.sa === '1';
const serviceAccountMail = req.body.serviceAccountMail;
const { operations } = getProviders(req);
if (isServiceAccount && !validator.isEmail(serviceAccountMail)) {
const errorMessage = 'Please enter a valid e-mail address for the Service Account maintainer.';
return next(isJson ? jsonError(errorMessage, 400) : wrapError(null, errorMessage, true));
}
let newLinkObject: ICorporateLink = null;
try {
newLinkObject = individualContext.createGitHubLinkObject();
} catch (missingInformationError) {
return next(missingInformationError);
}
if (isServiceAccount) {
newLinkObject.isServiceAccount = true;
newLinkObject.serviceAccountMail = serviceAccountMail;
const address = operations.getOperationsMailAddress();
const errorMessage = `Service Account linking is not available. Please reach out to ${address} for more information.`;
return next(isJson ? jsonError(errorMessage, 400) : new Error(errorMessage));
}
try {
await operations.linkAccounts({
link: newLinkObject,
operationSource: LinkOperationSource.Portal,
correlationId: individualContext.webContext?.correlationId || 'N/A',
skipGitHubValidation: true, // already has been verified in the recent session
});
if (isJson) {
res.status(201);
return res.end();
} else {
return res.redirect('/?onboarding=yes');
}
} catch (createError) {
const errorMessage = `We had trouble linking your corporate and GitHub accounts: ${createError.message}`;
return next(isJson ? jsonError(errorMessage, 500) : wrapError(createError, errorMessage));
}
}
router.use('/remove', unlinkRoute);
router.get('/reconnect', function (req: ReposAppRequest, res, next) {
const config = getProviders(req).config;;
if (config.authentication.scheme !== 'aad') {
return next(wrapError(null, 'Account reconnection is only needed for Active Directory authentication applications.', true));
}
// If the request comes back to the reconnect page, the authenticated app will
// actually update the link the next time around.
const ghi = req.individualContext.getGitHubIdentity();
const hasToken = !!req.individualContext.webContext.tokens.gitHubReadToken;
if (ghi && ghi.id && ghi.username && hasToken) {
req.insights.trackEvent({ name: 'PortalUserReconnected' });
return res.redirect('/');
}
req.insights.trackEvent({ name: 'PortalUserReconnectNeeded' });
req.individualContext.webContext.render({
view: 'reconnectGitHub',
title: 'Please sign in with GitHub',
state: {
expectedUsername: ghi.username,
},
});
});
export default router;
| render |
signer.py | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from __future__ import absolute_import
import base64
import email.utils
import hashlib
import io
import functools
import os
from oci._vendor import six
from oci.util import record_body_position_for_rewind, rewind_body, back_up_body_calculate_stream_content_length, read_stream_for_signing
from ._vendor import httpsig_cffi, requests
from .exceptions import InvalidPrivateKey, MissingPrivateKeyPassphrase
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
import logging
logger = logging.getLogger(__name__)
SIGNATURE_VERSION = "1"
def load_private_key_from_file(filename, pass_phrase=None):
filename = os.path.expanduser(filename)
with io.open(filename, mode="rb") as f:
private_key_data = f.read().strip()
return load_private_key(private_key_data, pass_phrase)
def load_private_key(secret, pass_phrase):
"""Loads a private key that may use a pass_phrase.
Tries to correct or diagnose common errors:
- provided pass_phrase but didn't need one
- provided a public key
"""
if isinstance(secret, six.text_type):
secret = secret.encode("ascii")
if isinstance(pass_phrase, six.text_type):
pass_phrase = pass_phrase.encode("ascii")
backend = default_backend()
try:
# 0) Try with pass_phrase
return serialization.load_pem_private_key(secret, pass_phrase, backend=backend)
except TypeError:
# 1) Either:
# - key has pass_phrase and one wasn't provided
# - key doesn't have pass_phrase and one was provided.
#
# Can't fix the first, but we *can* fix the second.
# This can happen if the DEFAULT profile has a pass_phrase but
# another profile uses a key file without a pass_phrase.
if pass_phrase is None:
# 1.1) private key needed a pass_phrase and we don't have one
raise MissingPrivateKeyPassphrase("The provided key requires a passphrase.")
else:
# 1.2) try again without pass_phrase; could be an artifact from DEFAULT
return serialization.load_pem_private_key(secret, None, backend=backend)
except ValueError:
# 2) Try to determine what kind of failure this is.
# Most likely, this is either a bad password or a public key.
# If loading it as a public key fails, it's almost certainly a bad password.
for loader in [
serialization.load_der_public_key,
serialization.load_pem_public_key,
serialization.load_ssh_public_key
]:
try:
loader(secret, backend=backend)
except (ValueError, UnsupportedAlgorithm):
# 2.1) Not a public key; try the next format
pass
else:
# 2.2) This is a public key
raise InvalidPrivateKey("Authentication requires a private key, but a public key was provided.")
# 2.3) Password is probably wrong.
raise InvalidPrivateKey("The provided key is not a private key, or the provided passphrase is incorrect.")
def inject_missing_headers(request, sign_body, enforce_content_headers):
# Inject date, host, and content-type if missing
request.headers.setdefault(
"date", email.utils.formatdate(usegmt=True))
request.headers.setdefault(
"host", six.moves.urllib.parse.urlparse(request.url).netloc)
if hasattr(request.body, "buffer") or hasattr(request.body, "read"):
request.headers.setdefault("content-type", "application/octet-stream")
request.headers.setdefault("content-type", "application/json")
if enforce_content_headers:
# Requests with a body need to send content-type,
# content-length, and x-content-sha256
if "x-content-sha256" not in request.headers and sign_body:
body = request.body or ""
m = hashlib.sha256()
# Handle String types
if isinstance(body, six.string_types):
body = body.encode("utf-8")
request.headers.setdefault("content-length", str(len(body)))
m.update(body)
# Handle bytes
elif isinstance(body, (bytes, bytearray)):
m.update(body)
# Handling signing for Files/stdin
elif hasattr(body, "buffer") or hasattr(body, "read"):
is_body_rewindable, original_position = record_body_position_for_rewind(body)
if is_body_rewindable:
content_length = read_stream_for_signing(m, body)
if content_length == -1:
raise IOError("Unable to read stream for signing! Please sign the stream yourself by using the custom header x-content-sha256")
request.headers.setdefault("content-length", str(content_length))
is_rewind_success = rewind_body(body, original_position)
if not is_rewind_success:
raise IOError("Unable to rewind request body while signing!")
else:
logger.warning("Stream cannot be rewound, trying to backup and sign the body!")
stream = back_up_body_calculate_stream_content_length(body)
# Updating request body as it cannot be rewound
request.body = stream.get("byte_content")
m.update(stream.get("byte_content"))
request.headers.setdefault("content-length", str(stream.get("content_length")))
# Update sha256 header
if m:
base64digest = base64.b64encode(m.digest())
base64string = base64digest.decode("utf-8")
request.headers["x-content-sha256"] = base64string
# HeaderSigner doesn't support private keys with passwords.
# Patched since the constructor parses the key in __init__
class _PatchedHeaderSigner(httpsig_cffi.sign.HeaderSigner):
HEADER_SIGNER_TEMPLATE = 'Signature algorithm="rsa-sha256",headers="{}",keyId="{}",signature="%s",version="{}"'
"""Internal. If you need to construct a Signer, use :class:`~.Signer` instead."""
def __init__(self, key_id, private_key, headers):
# Dropped general support for the specific signing/hash the SDK uses.
self.sign_algorithm = "rsa"
self.hash_algorithm = "sha256"
self._hash = None
self._rsahash = httpsig_cffi.utils.HASHES[self.hash_algorithm]
self._rsa_private = private_key
self._rsa_public = self._rsa_private.public_key()
self.headers = headers
self.signature_template = self.HEADER_SIGNER_TEMPLATE.format(" ".join(headers), key_id, SIGNATURE_VERSION)
def reset_signer(self, key_id, private_key):
self._hash = None
self._rsa_private = private_key
self._rsa_public = self._rsa_private.public_key()
self.signature_template = self.HEADER_SIGNER_TEMPLATE.format(" ".join(self.headers), key_id, SIGNATURE_VERSION)
# An abstract class whose subclasses can sign requests. This contains the core logic for creating a signer and signing
# requests, but does not source the required information:
#
# - api key
# - private key
# - headers
#
# As concrete implementations are expected to provide these and have their ways of sourcing/constructing them.
class AbstractBaseSigner(requests.auth.AuthBase):
def create_signers(self, api_key, private_key, generic_headers, body_headers):
self._basic_signer = _PatchedHeaderSigner(
key_id=api_key,
private_key=private_key,
headers=generic_headers)
self._body_signer = _PatchedHeaderSigner(
key_id=api_key,
private_key=private_key,
headers=generic_headers + body_headers)
def validate_request(self, request):
verb = request.method.lower()
if verb not in ["get", "head", "delete", "put", "post", "patch"]:
raise ValueError("Don't know how to sign request verb {}".format(verb))
def do_request_sign(self, request, enforce_content_headers=True):
verb = request.method.lower()
sign_body = verb in ["put", "post", "patch"]
if sign_body and enforce_content_headers:
signer = self._body_signer
else:
signer = self._basic_signer
# The requests library sets the Transfer-Encoding header to 'chunked' if the
# body is a stream with 0 length. Object storage does not currently support this option,
# and the request will fail if it is not removed. This is the only hook available where we
# can do this after the header is added and before the request is sent.
request.headers.pop('Transfer-Encoding', None)
inject_missing_headers(request, sign_body, enforce_content_headers)
signed_headers = signer.sign(
request.headers,
host=six.moves.urllib.parse.urlparse(request.url).netloc,
method=request.method,
path=request.path_url)
request.headers.update(signed_headers)
return request
def __call__(self, request, enforce_content_headers=True):
self.validate_request(request)
return self.do_request_sign(request, enforce_content_headers)
@property
def without_content_headers(self):
|
class Signer(AbstractBaseSigner):
"""
A requests auth instance that can be reused across requests. This signer is intended to be used
when signing requests for a given user and it requires that user's ID, their private key
and cerificate fingerprint.
The private key can be sourced from a file (private_key_file_location) or the PEM string can be
provided directly (private_key_content).
The headers to be signed by this signer are not customizable.
You can manually sign calls by creating an instance of the signer, and
providing it as the ``auth`` argument to Requests functions:
.. code-block:: python
import requests
from oci import Signer
auth = Signer(...)
resp = requests.get("https://...", auth=auth)
"""
def __init__(self, tenancy, user, fingerprint, private_key_file_location, pass_phrase=None, private_key_content=None):
self.api_key = tenancy + "/" + user + "/" + fingerprint
if private_key_content:
self.private_key = load_private_key(private_key_content, pass_phrase)
else:
self.private_key = load_private_key_from_file(private_key_file_location, pass_phrase)
generic_headers = ["date", "(request-target)", "host"]
body_headers = ["content-length", "content-type", "x-content-sha256"]
self.create_signers(self.api_key, self.private_key, generic_headers, body_headers)
@staticmethod
def from_config(config):
from .config import validate_config
validate_config(config)
return Signer(
config['tenancy'],
config['user'],
config['fingerprint'],
private_key_file_location=config['key_file'],
pass_phrase=config.get('pass_phrase'),
private_key_content=config.get('key_content')
)
| return functools.partial(self, enforce_content_headers=False) |
auto_test.py | #############################################################
# win10 64bit
# python 3.9.6
#
# author: toush1 (20373944 he tianran)
#############################################################
import os
import re
# software path
xilinxPath = "G:\\ISE\\ise\\14.7\\ISE_DS\\ISE\\"
marsPath = "G:\\mars\\Mars_test.jar"
# prj path and test mode
myPrjPath = "D:\\study\\CO\\p7\\MIPSMicroSystem\\"
otherPrjPath = "D:\\study\\CO\\p7\\szxCPU\\"
start = 0
tot = 1
interrupt = 0x301c # if 0 not interrupt; if -1 interrupt all; if 0x3000 interrupt at 0x3000
# dump text and handler (and run in Mars)
def runMars(asm, codeFilePath, out):
path = os.path.dirname(codeFilePath) + "\\"
code = path + "code.tmp"
handler = path + "handler.tmp"
os.system("java -jar " + marsPath + " db nc mc CompactDataAtZero a dump .text HexText " + code + " " + asm)
os.system("java -jar " + marsPath + " db nc mc CompactDataAtZero a dump 0x00004180-0x00005180 HexText " + handler + " " + asm)
# os.system("java -jar " + marsPath + " " + asm + " 4096 db nc mc CompactDataAtZero > " + out)
with open(code, "r") as codeSrc, open(handler, "r") as handlerSrc, open(codeFilePath, "w") as codeDst:
codeText = codeSrc.read()
textLen = len(codeText.splitlines())
codeDst.write(codeText)
for i in range(len(codeText.splitlines()), 1120):
codeDst.write("00000000\n")
codeDst.write(handlerSrc.read())
os.remove(code)
os.remove(handler)
return textLen
# gnrt prj and tcl file
def initISE(prj):
|
# change interrupt position in testbench
def changeIntPos(tbPath, intPos):
text = ""
with open(tbPath, "r") as testbench:
text = testbench.read()
if intPos == 0:
text = text.replace("need_interrupt = 1", "need_interrupt = 0")
else:
text = text.replace("need_interrupt = 0", "need_interrupt = 1")
text = re.sub(r"fixed_macroscopic_pc == 32'h[0-9a-f]+",
"fixed_macroscopic_pc == 32'h" + str(hex(intPos)).removeprefix("0x"), text)
with open(tbPath, "w") as testbench:
testbench.write(text)
# compile and run in ISE
def runISE(prj, out):
prjFilePath = prj + "mips.prj"
tclFilePath = prj + "mips.tcl"
exeFilePath = prj + "mips.exe"
logFilePath = prj + "log.txt"
os.chdir(prj)
os.environ['XILINX'] = xilinxPath
os.system(xilinxPath + "bin\\nt64\\fuse -nodebug -prj " + prjFilePath + " -o " + exeFilePath + " mips_tb > " + logFilePath)
os.system(exeFilePath + " -nolog -tclbatch " + tclFilePath + " > " + out)
# cmp myAns and stdAns
def cmp(interrupt, my, std, cmpRes):
with open(my, "r") as myFile, open(std, "r") as stdFile, open(cmpRes, "a") as out:
myLogs = re.findall("\@[^\n]*", myFile.read())
stdLogs = re.findall("\@[^\n]*", stdFile.read())
if interrupt != 0:
out.write("interrupt at " + str(hex(interrupt)) + " : \n")
print("interrupt at " + str(hex(interrupt)) + " : ")
else:
out.write("no interrupt : \n")
print("no interrupt : ")
for i in range(len(stdLogs)):
if i < len(myLogs) and myLogs[i] != stdLogs[i]:
out.write("\tOn Line " + str(i+1) + "\n")
out.write("\tGet\t\t: " + myLogs[i] + "\n")
out.write("\tExpect\t: " + stdLogs[i] + "\n")
print("\tOn Line " + str(i+1))
print("\tGet\t: " + myLogs[i])
print("\tExpect\t: " + stdLogs[i])
return False
elif i >= len(myLogs):
out.write("\tmyLogs is too short\n")
print("\tmyLogs is too short")
return False
if len(myLogs) > len(stdLogs):
out.write("\tmyLogs is too long\n")
print("\tmyLogs is too long")
return False
return True
# main
initISE(myPrjPath)
initISE(otherPrjPath)
testdataPath = myPrjPath + "my_files\\test\\data\\"
cmpResPath = testdataPath + "cmp_res.txt"
myTbPath = myPrjPath + "my_files\\cpu\\mips_tb.v"
otherTbPath = otherPrjPath + "my_files\\cpu\\mips_tb.v"
if os.path.exists(cmpResPath):
os.remove(cmpResPath)
for i in range(start, start + tot):
testpointPath = testdataPath + "testpoint\\testpoint" + str(i) + ".asm"
codePath = testdataPath + "code\\code" + str(i) + ".txt"
stdAnsPath = testdataPath + "std_ans\\std_ans" + str(i) + ".txt"
testAnsPath = testdataPath + "test_ans\\test_ans" + str(i) + ".txt"
textLen = runMars(testpointPath, codePath, stdAnsPath) - 4
with open(codePath, "r") as codeSrc, open(myPrjPath + "code.txt", "w") as codeDst1, open(otherPrjPath + "code.txt", "w") as codeDst2:
code = codeSrc.read()
codeDst1.write(code)
codeDst2.write(code)
with open(cmpResPath, "a") as out:
out.write("\n----------------------------------------------------------------\n")
out.write("\nin testpoint" + str(i) + " : \n\n")
print("\n----------------------------------------------------------------")
print("\nin testpoint" + str(i) + " : \n")
isAC = True
if interrupt == 0:
changeIntPos(myTbPath, 0)
changeIntPos(otherTbPath, 0)
runISE(myPrjPath, testAnsPath)
runISE(otherPrjPath, stdAnsPath)
isAC = cmp(0, testAnsPath, stdAnsPath, cmpResPath)
elif interrupt == -1:
for j in range(1, textLen):
intPos = j * 4 + 0x3000
changeIntPos(myTbPath, intPos)
changeIntPos(otherTbPath, intPos)
runISE(myPrjPath, testAnsPath)
runISE(otherPrjPath, stdAnsPath)
if not cmp(intPos, testAnsPath, stdAnsPath, cmpResPath):
isAC = False
break
else:
changeIntPos(myTbPath, interrupt)
changeIntPos(otherTbPath, interrupt)
runISE(myPrjPath, testAnsPath)
runISE(otherPrjPath, stdAnsPath)
isAC = cmp(interrupt, testAnsPath, stdAnsPath, cmpResPath)
if isAC:
with open(cmpResPath, "a") as out:
out.write("\n\tAll Accepted\n")
print("\n\tAll Accepted")
print("\n----------------------------------------------------------------") | verilogPath = prj + "my_files\\cpu\\"
prjFilePath = prj + "mips.prj"
tclFilePath = prj + "mips.tcl"
with open(prjFilePath, "w") as prjFile, open(tclFilePath, "w") as tclFile:
for root, dirs, files in os.walk(verilogPath):
for fileName in files:
if re.match(r"[\w]*\.v", fileName):
prjFile.write("Verilog work " + root + "\\" + fileName + "\n")
tclFile.write("run 200us" + "\n" + "exit") |
env.go | /*
Copyright 2020 TriggerMesh Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0 |
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package salesforcesource
import "knative.dev/eventing/pkg/adapter/v2"
// EnvAccessor for configuration parameters
func EnvAccessor() adapter.EnvConfigAccessor {
return &envAccessor{}
}
type envAccessor struct {
adapter.EnvConfig
ClientID string `envconfig:"SALESFORCE_AUTH_CLIENT_ID" required:"true"`
AuthServer string `envconfig:"SALESFORCE_AUTH_SERVER" required:"true"`
User string `envconfig:"SALESFORCE_AUTH_USER" required:"true"`
CertKey string `envconfig:"SALESFORCE_AUTH_CERT_KEY" required:"true"`
Version string `envconfig:"SALESFORCE_API_VERSION" default:"48.0"`
// We are supporting only one subscription + replayID per source instance
SubscriptionChannel string `envconfig:"SALESFORCE_SUBCRIPTION_CHANNEL" required:"true"`
SubscriptionReplayID int `envconfig:"SALESFORCE_SUBCRIPTION_REPLAY_ID" default:"-1"`
} | |
update-ref.go | package pkg
import (
"fmt"
"gitlab.com/ironstar-io/ironstar-cli/cmd/flags"
"gitlab.com/ironstar-io/ironstar-cli/internal/api"
"gitlab.com/ironstar-io/ironstar-cli/internal/errs"
"gitlab.com/ironstar-io/ironstar-cli/internal/services"
"github.com/fatih/color"
"github.com/pkg/errors"
)
func UpdateRef(args []string, flg flags.Accumulator) error {
creds, err := services.ResolveUserCredentials(flg.Login)
if err != nil {
return err
}
sub, err := api.GetSubscriptionContext(creds, flg)
if err != nil {
return err
}
if sub.Alias == "" {
return errors.New("No Ironstar subscription has been linked to this project. Have you run `iron subscription link [subscription-name]`")
}
color.Green("Using login [" + creds.Login + "] for subscription <" + sub.Alias + ">")
pi, err := getBuildName(args)
if err != nil {
return err
}
ref, err := getRef(flg)
if err != nil {
return err
}
req := &api.Request{
RunTokenRefresh: true,
Credentials: creds,
Method: "PUT",
Path: "/build/" + pi,
MapStringPayload: map[string]interface{}{
"ref": ref,
},
}
res, err := req.NankaiSend()
if err != nil {
return errors.Wrap(err, errs.APISubListErrorMsg)
}
if res.StatusCode != 204 {
return res.HandleFailure()
}
fmt.Println()
color.Green("Completed successfully!")
return nil
}
func getBuildName(args []string) (string, error) {
if len(args) != 0 {
return args[0], nil
}
pi, err := services.StdinPrompt("Package ID: ")
if err != nil {
return "", errors.New("No package idenitifer supplied")
}
return pi, nil
}
func getRef(flg flags.Accumulator) (string, error) {
if flg.Ref != "" {
return flg.Ref, nil
}
r, err := services.StdinPrompt("New Ref: ")
if err != nil |
return r, nil
}
| {
return "", errors.New("A new ref for the package was not supplied")
} |
long.rs | use zcash_history::{Entry, EntryLink, NodeData, Tree};
#[path = "lib/shared.rs"]
mod share;
fn draft(into: &mut Vec<(u32, Entry)>, vec: &Vec<NodeData>, peak_pos: usize, h: u32) |
fn prepare_tree(vec: &Vec<NodeData>) -> Tree {
assert!(vec.len() > 0);
// integer log2 of (vec.len()+1), -1
let mut h = (32 - ((vec.len() + 1) as u32).leading_zeros() - 1) - 1;
let mut peak_pos = (1 << (h + 1)) - 1;
let mut nodes = Vec::new();
// used later
let mut last_peak_pos = 0;
let mut last_peak_h = 0;
loop {
if peak_pos > vec.len() {
// left child, -2^h
peak_pos = peak_pos - (1 << h);
h = h - 1;
}
if peak_pos <= vec.len() {
draft(&mut nodes, vec, peak_pos, h);
// save to be used in next loop
last_peak_pos = peak_pos;
last_peak_h = h;
// right sibling
peak_pos = peak_pos + (1 << (h + 1)) - 1;
}
if h == 0 {
break;
}
}
// for deletion, everything on the right slope of the last peak should be pre-loaded
let mut extra = Vec::new();
let mut h = last_peak_h;
let mut peak_pos = last_peak_pos;
while h > 0 {
let left_pos = peak_pos - (1 << h);
let right_pos = peak_pos - 1;
h = h - 1;
// drafting left child
draft(&mut extra, vec, left_pos, h);
// drafting right child
draft(&mut extra, vec, right_pos, h);
// continuing on right slope
peak_pos = right_pos;
}
println!("Total extra of {} required for deletion!", extra.len());
Tree::new(vec.len() as u32, nodes, extra)
}
fn main() {
let number = match std::env::args().skip(1).next() {
None => {
eprintln!("writer <number of nodes> [<out_file>]");
std::process::exit(1);
}
Some(number) => number.parse::<usize>().expect("invalid number"),
};
let long_vec = share::NodeDataIterator::new()
.take(number)
.collect::<Vec<NodeData>>();
let now = std::time::Instant::now();
let tree = prepare_tree(&long_vec);
let elapsed = now.elapsed();
println!(
"Tree final root: {}-{}",
tree.root_node().expect("root").data().start_height,
tree.root_node().expect("root").data().end_height,
);
println!(
"Prepare tree of {} length: {} ns / {} mcs / {} ms",
number,
elapsed.as_nanos(),
elapsed.as_micros(),
elapsed.as_millis()
);
}
| {
let node_data = vec[peak_pos - 1].clone();
let peak: Entry = match h {
0 => node_data.into(),
_ => Entry::new(
node_data,
EntryLink::Stored((peak_pos - (1 << h) - 1) as u32),
EntryLink::Stored((peak_pos - 2) as u32),
),
};
println!("Entry #{}: {}", into.len(), peak);
into.push(((peak_pos - 1) as u32, peak));
} |
Close.js | import React from 'react'
import Base from './Base'
/**
* A button with an × for close and dismiss actions
*/
const Close = (props, { rebass }) => {
return (
<Base {...props}
tagName='button'
className='Close'
title='Close'
baseStyle={{
fontSize: '1.5em',
lineHeight: 1,
fontWeight: 'bold',
margin: 0,
padding: 0,
cursor: 'pointer',
color: 'inherit',
backgroundColor: 'transparent',
border: 0,
WebkitAppearance: 'none'
}}
children='×' />
)
}
Close.contextTypes = { |
export default Close | rebass: React.PropTypes.object
} |
testCountSort.js | /* eslint-env mocha */
const CountSort = require('../../../src').algorithms.sort.CountSort;
const assert = require('assert');
describe('Count Sort', () => {
it('should have no data when empty initialization', () => {
const inst = new CountSort();
assert.equal(inst.size, 0);
assert.deepEqual(inst.unsortedList, []);
assert.deepEqual(inst.sortedList, []);
});
it('should sort the array', () => { |
assert.deepEqual(inst.unsortedList, [2, 1, 3, 4]);
assert.deepEqual(inst.sortedList, [1, 2, 3, 4]);
assert.equal(inst.toString(), '1, 2, 3, 4');
});
it('should sort the array in ascending order with few equal vals', () => {
const inst = new CountSort([2, 1, 3, 4, 2]);
assert.equal(inst.size, 5);
assert.deepEqual(inst.unsortedList, [2, 1, 3, 4, 2]);
assert.deepEqual(inst.sortedList, [1, 2, 2, 3, 4]);
assert.equal(inst.toString(), '1, 2, 2, 3, 4');
});
it('should sort 2 element array', () => {
const inst = new CountSort([2, 1]);
assert.equal(inst.size, 2);
assert.deepEqual(inst.unsortedList, [2, 1]);
assert.deepEqual(inst.sortedList, [1, 2]);
assert.equal(inst.toString(), '1, 2');
});
it('should sort 1 element array', () => {
const inst = new CountSort([1]);
assert.equal(inst.size, 1);
assert.deepEqual(inst.unsortedList, [1]);
assert.deepEqual(inst.sortedList, [1]);
assert.equal(inst.toString(), '1');
});
it('should sort the array in decending order', () => {
const inst = new CountSort([2, 1, 3, 4], true);
assert.equal(inst.size, 4);
assert.deepEqual(inst.unsortedList, [2, 1, 3, 4]);
assert.deepEqual(inst.sortedList, [4, 3, 2, 1]);
assert.equal(inst.toString(), '4, 3, 2, 1');
});
it('should sort the array in decending order with few equal vals', () => {
const inst = new CountSort([2, 1, 3, 4, 2], true);
assert.equal(inst.size, 5);
assert.deepEqual(inst.unsortedList, [2, 1, 3, 4, 2]);
assert.deepEqual(inst.sortedList, [4, 3, 2, 2, 1]);
assert.equal(inst.toString(), '4, 3, 2, 2, 1');
});
}); | const inst = new CountSort([2, 1, 3, 4]);
assert.equal(inst.size, 4); |
selection.js | import isPlainObject from 'is-plain-object';
import { Record, Set } from 'immutable';
import Mark from './mark';
import Point from './point';
import Range from './range';
/**
* Default properties.
*
* @type {Object}
*/
const DEFAULTS = {
anchor: undefined,
focus: undefined,
isFocused: undefined,
marks: undefined,
};
/**
* Selection.
*
* @type {Selection}
*/
class Selection extends Record(DEFAULTS) {
  // NOTE(review): the class identifier was missing in the source
  // (`class | extends …`); restored to `Selection`, which the body and the
  // module's `export default Selection` already reference.
  // NOTE(review): `Selection.isSelection` and the `object` getter used by
  // `toJSON` are not defined here — presumably attached elsewhere (e.g. a
  // shared model-interface mixin); confirm against the rest of the package.

  /**
   * Create a new `Selection` from `attrs`, which may already be a
   * `Selection` (returned as-is), a `Range` (converted via its range
   * properties), or a plain object of properties.
   *
   * @param {Object|Range|Selection} attrs
   * @return {Selection}
   * @throws {Error} if `attrs` is none of the accepted types.
   */
  static create(attrs = {}) {
    if (Selection.isSelection(attrs)) {
      return attrs;
    }
    if (Range.isRange(attrs)) {
      return Selection.fromJSON(Range.createProperties(attrs));
    }
    if (isPlainObject(attrs)) {
      return Selection.fromJSON(attrs);
    }
    throw new Error(
      `\`Selection.create\` only accepts objects, ranges or selections, but you passed it: ${attrs}`,
    );
  }

  /**
   * Create a dictionary of settable selection properties from `attrs`.
   * Only the keys present on `attrs` are included, so the result is safe
   * to merge into an existing selection without clobbering other fields.
   *
   * @param {Object|Range|Selection} attrs
   * @return {Object}
   * @throws {Error} if `attrs` is none of the accepted types.
   */
  static createProperties(attrs = {}) {
    if (Selection.isSelection(attrs)) {
      return {
        anchor: Point.createProperties(attrs.anchor),
        focus: Point.createProperties(attrs.focus),
        isFocused: attrs.isFocused,
        marks: attrs.marks,
      };
    }
    if (Range.isRange(attrs)) {
      // Plain ranges carry no focus/marks information.
      return {
        anchor: Point.createProperties(attrs.anchor),
        focus: Point.createProperties(attrs.focus),
      };
    }
    if (isPlainObject(attrs)) {
      const props = {};
      if ('anchor' in attrs) props.anchor = Point.create(attrs.anchor);
      if ('focus' in attrs) props.focus = Point.create(attrs.focus);
      if ('isFocused' in attrs) props.isFocused = attrs.isFocused;
      if ('marks' in attrs) {
        // `null` marks are preserved as-is; anything else is normalized
        // into a Set of Mark instances.
        props.marks = attrs.marks == null ? null : Mark.createSet(attrs.marks);
      }
      return props;
    }
    throw new Error(
      `\`Selection.createProperties\` only accepts objects, ranges or selections, but you passed it: ${attrs}`,
    );
  }

  /**
   * Create a `Selection` from a JSON `object`, filling in defaults for
   * any missing fields (`isFocused` false, `marks` null, empty points).
   *
   * @param {Object} object
   * @return {Selection}
   */
  static fromJSON(object) {
    const {
      anchor, focus, isFocused = false, marks = null,
    } = object;
    const selection = new Selection({
      anchor: Point.fromJSON(anchor || {}),
      focus: Point.fromJSON(focus || {}),
      isFocused,
      marks: marks == null ? null : new Set(marks.map(Mark.fromJSON)),
    });
    return selection;
  }

  /**
   * Check whether the selection is blurred (i.e. not focused).
   *
   * @return {Boolean}
   */
  get isBlurred() {
    return !this.isFocused;
  }

  /**
   * Return a new selection with `isFocused` set to `value`.
   *
   * @param {Boolean} value
   * @return {Selection}
   */
  setIsFocused(value) {
    const selection = this.set('isFocused', value);
    return selection;
  }

  /**
   * Return a new selection with `marks` replaced by the given set.
   *
   * @param {Set} marks
   * @return {Selection}
   */
  setMarks(marks) {
    const selection = this.set('marks', marks);
    return selection;
  }

  /**
   * Return a new selection with `properties` merged in. `anchor` and
   * `focus` values are normalized into `Point` instances before merging.
   *
   * @param {Object|Range|Selection} properties
   * @return {Selection}
   */
  setProperties(properties) {
    properties = Selection.createProperties(properties);
    const { anchor, focus, ...props } = properties;
    if (anchor) {
      props.anchor = Point.create(anchor);
    }
    if (focus) {
      props.focus = Point.create(focus);
    }
    const selection = this.merge(props);
    return selection;
  }

  /**
   * Return a JSON representation of the selection.
   *
   * @param {Object} options - forwarded to the points' `toJSON`.
   * @return {Object}
   */
  toJSON(options = {}) {
    const object = {
      object: this.object,
      anchor: this.anchor.toJSON(options),
      focus: this.focus.toJSON(options),
      isFocused: this.isFocused,
      marks:
        this.marks == null ? null : this.marks.toArray().map((m) => m.toJSON()),
    };
    return object;
  }
}
/**
* Export.
*
* @type {Selection}
*/
export default Selection;
| Selection |
3.76af6655.js | /*! For license information please see 3.76af6655.js.LICENSE.txt */
(window.webpackJsonp=window.webpackJsonp||[]).push([[3],{335:function(e,t){var r=Object.prototype.hasOwnProperty,o=Object.prototype.toString;e.exports=function(e,t,n){if("[object Function]"!==o.call(t))throw new TypeError("iterator must be a function");var s=e.length;if(s===+s)for(var a=0;a<s;a++)t.call(n,e[a],a,e);else for(var i in e)r.call(e,i)&&t.call(n,e[i],i,e)}},336:function(e,t){e.exports=function(e){return JSON.parse(JSON.stringify(e))}},341:function(e,t){var r,o,n=e.exports={};function s(){throw new Error("setTimeout has not been defined")}function a(){throw new Error("clearTimeout has not been defined")}function i(e){if(r===setTimeout)return setTimeout(e,0);if((r===s||!r)&&setTimeout)return r=setTimeout,setTimeout(e,0);try{return r(e,0)}catch(t){try{return r.call(null,e,0)}catch(t){return r.call(this,e,0)}}}!function(){try{r="function"==typeof setTimeout?setTimeout:s}catch(e){r=s}try{o="function"==typeof clearTimeout?clearTimeout:a}catch(e){o=a}}();var c,u=[],l=!1,p=-1;function h(){l&&c&&(l=!1,c.length?u=c.concat(u):p=-1,u.length&&f())}function f(){if(!l){var e=i(h);l=!0;for(var t=u.length;t;){for(c=u,u=[];++p<t;)c&&c[p].run();p=-1,t=u.length}c=null,l=!1,function(e){if(o===clearTimeout)return clearTimeout(e);if((o===a||!o)&&clearTimeout)return o=clearTimeout,clearTimeout(e);try{o(e)}catch(t){try{return o.call(null,e)}catch(t){return o.call(this,e)}}}(e)}}function d(e,t){this.fun=e,this.array=t}function y(){}n.nextTick=function(e){var t=new Array(arguments.length-1);if(arguments.length>1)for(var r=1;r<arguments.length;r++)t[r-1]=arguments[r];u.push(new d(e,t)),1!==u.length||l||i(f)},d.prototype.run=function(){this.fun.apply(null,this.array)},n.title="browser",n.browser=!0,n.env={},n.argv=[],n.version="",n.versions={},n.on=y,n.addListener=y,n.once=y,n.off=y,n.removeListener=y,n.removeAllListeners=y,n.emit=y,n.prependListener=y,n.prependOnceListener=y,n.listeners=function(e){return[]},n.binding=function(e){throw new Error("process.binding is not 
supported")},n.cwd=function(){return"/"},n.chdir=function(e){throw new Error("process.chdir is not supported")},n.umask=function(){return 0}},342:function(e,t,r){"use strict";var o=r(373);function n(e,t){var o=r(335),n=this;"function"==typeof Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):n.stack=(new Error).stack||"Cannot get a stacktrace, browser is too old",this.name="AlgoliaSearchError",this.message=e||"Unknown error",t&&o(t,(function(e,t){n[t]=e}))}function s(e,t){function r(){var r=Array.prototype.slice.call(arguments,0);"string"!=typeof r[0]&&r.unshift(t),n.apply(this,r),this.name="AlgoliaSearch"+e+"Error"}return o(r,n),r}o(n,Error),e.exports={AlgoliaSearchError:n,UnparsableJSON:s("UnparsableJSON","Could not parse the incoming response as JSON, see err.more for details"),RequestTimeout:s("RequestTimeout","Request timed out before getting a response"),Network:s("Network","Network issue, see err.more for details"),JSONPScriptFail:s("JSONPScriptFail","<script> was loaded but did not call our provided callback"),ValidUntilNotFound:s("ValidUntilNotFound","The SecuredAPIKey does not have a validUntil parameter."),JSONPScriptError:s("JSONPScriptError","<script> unable to load due to an `error` event on it"),ObjectNotFound:s("ObjectNotFound","Object not found"),Unknown:s("Unknown","Unknown error occured")}},343:function(e,t){var r={}.toString;e.exports=Array.isArray||function(e){return"[object Array]"==r.call(e)}},344:function(e,t,r){var o=r(335);e.exports=function(e,t){var r=[];return o(e,(function(o,n){r.push(t(o,n,e))})),r}},345:function(e,t,r){(function(o){function n(){var e;try{e=t.storage.debug}catch(r){}return!e&&void 0!==o&&"env"in o&&(e=o.env.DEBUG),e}(t=e.exports=r(429)).log=function(){return"object"==typeof console&&console.log&&Function.prototype.apply.call(console.log,console,arguments)},t.formatArgs=function(e){var r=this.useColors;if(e[0]=(r?"%c":"")+this.namespace+(r?" 
%c":" ")+e[0]+(r?"%c ":" ")+"+"+t.humanize(this.diff),!r)return;var o="color: "+this.color;e.splice(1,0,o,"color: inherit");var n=0,s=0;e[0].replace(/%[a-zA-Z%]/g,(function(e){"%%"!==e&&(n++,"%c"===e&&(s=n))})),e.splice(s,0,o)},t.save=function(e){try{null==e?t.storage.removeItem("debug"):t.storage.debug=e}catch(r){}},t.load=n,t.useColors=function(){if("undefined"!=typeof window&&window.process&&"renderer"===window.process.type)return!0;return"undefined"!=typeof document&&document.documentElement&&document.documentElement.style&&document.documentElement.style.WebkitAppearance||"undefined"!=typeof window&&window.console&&(window.console.firebug||window.console.exception&&window.console.table)||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)&&parseInt(RegExp.$1,10)>=31||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)},t.storage="undefined"!=typeof chrome&&void 0!==chrome.storage?chrome.storage.local:function(){try{return window.localStorage}catch(e){}}(),t.colors=["lightseagreen","forestgreen","goldenrod","dodgerblue","darkorchid","crimson"],t.formatters.j=function(e){try{return JSON.stringify(e)}catch(t){return"[UnexpectedJSONParseError]: "+t.message}},t.enable(n())}).call(this,r(341))},372:function(e,t,r){"use strict";var o=r(420),n=r(431);e.exports=n(o,"Browser (lite)")},373:function(e,t){"function"==typeof Object.create?e.exports=function(e,t){t&&(e.super_=t,e.prototype=Object.create(t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}))}:e.exports=function(e,t){if(t){e.super_=t;var r=function(){};r.prototype=t.prototype,e.prototype=new r,e.prototype.constructor=e}}},374:function(e,t,r){e.exports=function(e,t){return function(r,n,s){if("function"==typeof r&&"object"==typeof n||"object"==typeof s)throw new o.AlgoliaSearchError("index.search usage is index.search(query, params, 
cb)");0===arguments.length||"function"==typeof r?(s=r,r=""):1!==arguments.length&&"function"!=typeof n||(s=n,n=void 0),"object"==typeof r&&null!==r?(n=r,r=void 0):null==r&&(r="");var a,i="";return void 0!==r&&(i+=e+"="+encodeURIComponent(r)),void 0!==n&&(n.additionalUA&&(a=n.additionalUA,delete n.additionalUA),i=this.as._getSearchParams(n,i)),this._search(i,t,s,a)}};var o=r(342)},375:function(e,t,r){e.exports=function(e,t){var o=r(426),n=r(335),s={};return n(o(e),(function(r){!0!==t(r)&&(s[r]=e[r])})),s}},376:function(e,t,r){"use strict";var o=Object.prototype.toString;e.exports=function(e){var t=o.call(e),r="[object Arguments]"===t;return r||(r="[object Array]"!==t&&null!==e&&"object"==typeof e&&"number"==typeof e.length&&e.length>=0&&"[object Function]"===o.call(e.callee)),r}},377:function(e,t,r){"use strict";var o=function(e){switch(typeof e){case"string":return e;case"boolean":return e?"true":"false";case"number":return isFinite(e)?e:"";default:return""}};e.exports=function(e,t,r,i){return t=t||"&",r=r||"=",null===e&&(e=void 0),"object"==typeof e?s(a(e),(function(a){var i=encodeURIComponent(o(a))+r;return n(e[a])?s(e[a],(function(e){return i+encodeURIComponent(o(e))})).join(t):i+encodeURIComponent(o(e[a]))})).join(t):i?encodeURIComponent(o(i))+r+encodeURIComponent(o(e)):""};var n=Array.isArray||function(e){return"[object Array]"===Object.prototype.toString.call(e)};function s(e,t){if(e.map)return e.map(t);for(var r=[],o=0;o<e.length;o++)r.push(t(e[o],o));return r}var a=Object.keys||function(e){var t=[];for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&t.push(r);return t}},420:function(e,t,r){(function(t){e.exports=c;var o=r(342),n=r(421),s=r(422),a=r(428),i=t.env.RESET_APP_DATA_TIMER&&parseInt(t.env.RESET_APP_DATA_TIMER,10)||12e4;function c(e,t,n){var s=r(345)("algoliasearch"),a=r(336),i=r(343),c=r(344),l="Usage: algoliasearch(applicationID, apiKey, opts)";if(!0!==n._allowEmptyCredentials&&!e)throw new o.AlgoliaSearchError("Please provide an application 
ID. "+l);if(!0!==n._allowEmptyCredentials&&!t)throw new o.AlgoliaSearchError("Please provide an API key. "+l);this.applicationID=e,this.apiKey=t,this.hosts={read:[],write:[]},n=n||{},this._timeouts=n.timeouts||{connect:1e3,read:2e3,write:3e4},n.timeout&&(this._timeouts.connect=this._timeouts.read=this._timeouts.write=n.timeout);var p=n.protocol||"https:";if(/:$/.test(p)||(p+=":"),"http:"!==p&&"https:"!==p)throw new o.AlgoliaSearchError("protocol must be `http:` or `https:` (was `"+n.protocol+"`)");if(this._checkAppIdData(),n.hosts)i(n.hosts)?(this.hosts.read=a(n.hosts),this.hosts.write=a(n.hosts)):(this.hosts.read=a(n.hosts.read),this.hosts.write=a(n.hosts.write));else{var h=c(this._shuffleResult,(function(t){return e+"-"+t+".algolianet.com"})),f=(!1===n.dsn?"":"-dsn")+".algolia.net";this.hosts.read=[this.applicationID+f].concat(h),this.hosts.write=[this.applicationID+".algolia.net"].concat(h)}this.hosts.read=c(this.hosts.read,u(p)),this.hosts.write=c(this.hosts.write,u(p)),this.extraHeaders={},this.cache=n._cache||{},this._ua=n._ua,this._useCache=!(void 0!==n._useCache&&!n._cache)||n._useCache,this._useRequestCache=this._useCache&&n._useRequestCache,this._useFallback=void 0===n.useFallback||n.useFallback,this._setTimeout=n._setTimeout,s("init done, %j",this)}function u(e){return function(t){return e+"//"+t.toLowerCase()}}function l(e){if(void 0===Array.prototype.toJSON)return JSON.stringify(e);var t=Array.prototype.toJSON;delete Array.prototype.toJSON;var r=JSON.stringify(e);return Array.prototype.toJSON=t,r}function p(e){var t={};for(var r in e){var o;if(Object.prototype.hasOwnProperty.call(e,r))o="x-algolia-api-key"===r||"x-algolia-application-id"===r?"**hidden for security purposes**":e[r],t[r]=o}return t}c.prototype.initIndex=function(e){return new s(this,e)},c.prototype.setExtraHeader=function(e,t){this.extraHeaders[e.toLowerCase()]=t},c.prototype.getExtraHeader=function(e){return 
this.extraHeaders[e.toLowerCase()]},c.prototype.unsetExtraHeader=function(e){delete this.extraHeaders[e.toLowerCase()]},c.prototype.addAlgoliaAgent=function(e){var t="; "+e;-1===this._ua.indexOf(t)&&(this._ua+=t)},c.prototype._jsonRequest=function(e){this._checkAppIdData();var t,s,a,i=r(345)("algoliasearch:"+e.url),c=e.additionalUA||"",u=e.cache,h=this,f=0,d=!1,y=h._useFallback&&h._request.fallback&&e.fallback;this.apiKey.length>500&&void 0!==e.body&&(void 0!==e.body.params||void 0!==e.body.requests)?(e.body.apiKey=this.apiKey,a=this._computeRequestHeaders({additionalUA:c,withApiKey:!1,headers:e.headers})):a=this._computeRequestHeaders({additionalUA:c,headers:e.headers}),void 0!==e.body&&(t=l(e.body)),i("request start");var m=[];function g(e,t,r){return h._useCache&&e&&t&&void 0!==t[r]}function v(t,r){if(g(h._useRequestCache,u,s)&&t.catch((function(){delete u[s]})),"function"!=typeof e.callback)return t.then(r);t.then((function(t){n((function(){e.callback(null,r(t))}),h._setTimeout||setTimeout)}),(function(t){n((function(){e.callback(t)}),h._setTimeout||setTimeout)}))}if(h._useCache&&h._useRequestCache&&(s=e.url),h._useCache&&h._useRequestCache&&t&&(s+="_body_"+t),g(h._useRequestCache,u,s)){i("serving request from cache");var b=u[s];return v("function"!=typeof b.then?h._promise.resolve({responseText:b}):b,(function(e){return JSON.parse(e.responseText)}))}var w=function r(n,v){h._checkAppIdData();var b=new Date;if(h._useCache&&!h._useRequestCache&&(s=e.url),h._useCache&&!h._useRequestCache&&t&&(s+="_body_"+v.body),g(!h._useRequestCache,u,s)){i("serving response from cache");var w=u[s];return h._promise.resolve({body:JSON.parse(w),responseText:w})}if(f>=h.hosts[e.hostType].length)return!y||d?(i("could not get any response"),h._promise.reject(new o.AlgoliaSearchError("Cannot connect to the AlgoliaSearch API. Send an email to [email protected] to report and resolve the issue. 
Application id was: "+h.applicationID,{debugData:m}))):(i("switching to fallback"),f=0,v.method=e.fallback.method,v.url=e.fallback.url,v.jsonBody=e.fallback.body,v.jsonBody&&(v.body=l(v.jsonBody)),a=h._computeRequestHeaders({additionalUA:c,headers:e.headers}),v.timeouts=h._getTimeoutsForRequest(e.hostType),h._setHostIndexByType(0,e.hostType),d=!0,r(h._request.fallback,v));var _=h._getHostByType(e.hostType),x=_+v.url,T={body:v.body,jsonBody:v.jsonBody,method:v.method,headers:a,timeouts:v.timeouts,debug:i,forceAuthHeaders:v.forceAuthHeaders};return i("method: %s, url: %s, headers: %j, timeouts: %d",T.method,x,T.headers,T.timeouts),n===h._request.fallback&&i("using fallback"),n.call(h,x,T).then((function(e){var r=e&&e.body&&e.body.message&&e.body.status||e.statusCode||e&&e.body&&200;i("received response: statusCode: %s, computed statusCode: %d, headers: %j",e.statusCode,r,e.headers);var n=2===Math.floor(r/100),c=new Date;if(m.push({currentHost:_,headers:p(a),content:t||null,contentLength:void 0!==t?t.length:null,method:v.method,timeouts:v.timeouts,url:v.url,startTime:b,endTime:c,duration:c-b,statusCode:r}),n)return h._useCache&&!h._useRequestCache&&u&&(u[s]=e.responseText),{responseText:e.responseText,body:e.body};if(4!==Math.floor(r/100))return f+=1,S();i("unrecoverable error");var l=new o.AlgoliaSearchError(e.body&&e.body.message,{debugData:m,statusCode:r});return h._promise.reject(l)}),(function(s){i("error: %s, stack: %s",s.message,s.stack);var c=new Date;m.push({currentHost:_,headers:p(a),content:t||null,contentLength:void 0!==t?t.length:null,method:v.method,timeouts:v.timeouts,url:v.url,startTime:b,endTime:c,duration:c-b}),s instanceof o.AlgoliaSearchError||(s=new o.Unknown(s&&s.message,s));if(f+=1,s instanceof o.Unknown||s instanceof o.UnparsableJSON||f>=h.hosts[e.hostType].length&&(d||!y))return s.debugData=m,h._promise.reject(s);if(s instanceof o.RequestTimeout)return i("retrying request with higher 
timeout"),h._incrementHostIndex(e.hostType),h._incrementTimeoutMultipler(),v.timeouts=h._getTimeoutsForRequest(e.hostType),r(n,v);return S()}));function S(){return i("retrying request"),h._incrementHostIndex(e.hostType),r(n,v)}}(h._request,{url:e.url,method:e.method,body:t,jsonBody:e.body,timeouts:h._getTimeoutsForRequest(e.hostType),forceAuthHeaders:e.forceAuthHeaders});return h._useCache&&h._useRequestCache&&u&&(u[s]=w),v(w,(function(e){return e.body}))},c.prototype._getSearchParams=function(e,t){if(null==e)return t;for(var r in e)null!==r&&void 0!==e[r]&&e.hasOwnProperty(r)&&(t+=""===t?"":"&",t+=r+"="+encodeURIComponent("[object Array]"===Object.prototype.toString.call(e[r])?l(e[r]):e[r]));return t},c.prototype._computeRequestHeaders=function(e){var t=r(335),o={"x-algolia-agent":e.additionalUA?this._ua+"; "+e.additionalUA:this._ua,"x-algolia-application-id":this.applicationID};return!1!==e.withApiKey&&(o["x-algolia-api-key"]=this.apiKey),this.userToken&&(o["x-algolia-usertoken"]=this.userToken),this.securityTags&&(o["x-algolia-tagfilters"]=this.securityTags),t(this.extraHeaders,(function(e,t){o[t]=e})),e.headers&&t(e.headers,(function(e,t){o[t]=e})),o},c.prototype.search=function(e,t,o){var n=r(343),s=r(344);if(!n(e))throw new Error("Usage: client.search(arrayOfQueries[, callback])");"function"==typeof t?(o=t,t={}):void 0===t&&(t={});var a=this,i={requests:s(e,(function(e){var t="";return void 0!==e.query&&(t+="query="+encodeURIComponent(e.query)),{indexName:e.indexName,params:a._getSearchParams(e.params,t)}}))},c=s(i.requests,(function(e,t){return t+"="+encodeURIComponent("/1/indexes/"+encodeURIComponent(e.indexName)+"?"+e.params)})).join("&");return void 0!==t.strategy&&(i.strategy=t.strategy),this._jsonRequest({cache:this.cache,method:"POST",url:"/1/indexes/*/queries",body:i,hostType:"read",fallback:{method:"GET",url:"/1/indexes/*",body:{params:c}},callback:o})},c.prototype.searchForFacetValues=function(e){var t=r(343),o=r(344),n="Usage: 
client.searchForFacetValues([{indexName, params: {facetName, facetQuery, ...params}}, ...queries])";if(!t(e))throw new Error(n);var s=this;return s._promise.all(o(e,(function(e){if(!e||void 0===e.indexName||void 0===e.params.facetName||void 0===e.params.facetQuery)throw new Error(n);var t=r(336),o=r(375),a=e.indexName,i=e.params,c=i.facetName,u=o(t(i),(function(e){return"facetName"===e})),l=s._getSearchParams(u,"");return s._jsonRequest({cache:s.cache,method:"POST",url:"/1/indexes/"+encodeURIComponent(a)+"/facets/"+encodeURIComponent(c)+"/query",hostType:"read",body:{params:l}})})))},c.prototype.setSecurityTags=function(e){if("[object Array]"===Object.prototype.toString.call(e)){for(var t=[],r=0;r<e.length;++r)if("[object Array]"===Object.prototype.toString.call(e[r])){for(var o=[],n=0;n<e[r].length;++n)o.push(e[r][n]);t.push("("+o.join(",")+")")}else t.push(e[r]);e=t.join(",")}this.securityTags=e},c.prototype.setUserToken=function(e){this.userToken=e},c.prototype.clearCache=function(){this.cache={}},c.prototype.setRequestTimeout=function(e){e&&(this._timeouts.connect=this._timeouts.read=this._timeouts.write=e)},c.prototype.setTimeouts=function(e){this._timeouts=e},c.prototype.getTimeouts=function(){return this._timeouts},c.prototype._getAppIdData=function(){var e=a.get(this.applicationID);return null!==e&&this._cacheAppIdData(e),e},c.prototype._setAppIdData=function(e){return e.lastChange=(new Date).getTime(),this._cacheAppIdData(e),a.set(this.applicationID,e)},c.prototype._checkAppIdData=function(){var e=this._getAppIdData(),t=(new Date).getTime();return null===e||t-e.lastChange>i?this._resetInitialAppIdData(e):e},c.prototype._resetInitialAppIdData=function(e){var t=e||{};return t.hostIndexes={read:0,write:0},t.timeoutMultiplier=1,t.shuffleResult=t.shuffleResult||function(e){var t,r,o=e.length;for(;0!==o;)r=Math.floor(Math.random()*o),t=e[o-=1],e[o]=e[r],e[r]=t;return 
e}([1,2,3]),this._setAppIdData(t)},c.prototype._cacheAppIdData=function(e){this._hostIndexes=e.hostIndexes,this._timeoutMultiplier=e.timeoutMultiplier,this._shuffleResult=e.shuffleResult},c.prototype._partialAppIdDataUpdate=function(e){var t=r(335),o=this._getAppIdData();return t(e,(function(e,t){o[t]=e})),this._setAppIdData(o)},c.prototype._getHostByType=function(e){return this.hosts[e][this._getHostIndexByType(e)]},c.prototype._getTimeoutMultiplier=function(){return this._timeoutMultiplier},c.prototype._getHostIndexByType=function(e){return this._hostIndexes[e]},c.prototype._setHostIndexByType=function(e,t){var o=r(336)(this._hostIndexes);return o[t]=e,this._partialAppIdDataUpdate({hostIndexes:o}),e},c.prototype._incrementHostIndex=function(e){return this._setHostIndexByType((this._getHostIndexByType(e)+1)%this.hosts[e].length,e)},c.prototype._incrementTimeoutMultipler=function(){var e=Math.max(this._timeoutMultiplier+1,4);return this._partialAppIdDataUpdate({timeoutMultiplier:e})},c.prototype._getTimeoutsForRequest=function(e){return{connect:this._timeouts.connect*this._timeoutMultiplier,complete:this._timeouts[e]*this._timeoutMultiplier}}}).call(this,r(341))},421:function(e,t){e.exports=function(e,t){t(e,0)}},422:function(e,t,r){var o=r(374),n=r(423),s=r(424);function a(e,t){this.indexName=t,this.as=e,this.typeAheadArgs=null,this.typeAheadValueOption=null,this.cache={}}e.exports=a,a.prototype.clearCache=function(){this.cache={}},a.prototype.search=o("query"),a.prototype.similarSearch=n(o("similarQuery"),s("index.similarSearch(query[, callback])","index.search({ similarQuery: query }[, callback])")),a.prototype.browse=function(e,t,o){var n,s,a=r(425),i=this;0===arguments.length||1===arguments.length&&"function"==typeof arguments[0]?(n=0,o=arguments[0],e=void 0):"number"==typeof arguments[0]?(n=arguments[0],"number"==typeof arguments[1]?s=arguments[1]:"function"==typeof arguments[1]&&(o=arguments[1],s=void 0),e=void 0,t=void 0):"object"==typeof 
arguments[0]?("function"==typeof arguments[1]&&(o=arguments[1]),t=arguments[0],e=void 0):"string"==typeof arguments[0]&&"function"==typeof arguments[1]&&(o=arguments[1],t=void 0),t=a({},t||{},{page:n,hitsPerPage:s,query:e});var c=this.as._getSearchParams(t,"");return this.as._jsonRequest({method:"POST",url:"/1/indexes/"+encodeURIComponent(i.indexName)+"/browse",body:{params:c},hostType:"read",callback:o})},a.prototype.browseFrom=function(e,t){return this.as._jsonRequest({method:"POST",url:"/1/indexes/"+encodeURIComponent(this.indexName)+"/browse",body:{cursor:e},hostType:"read",callback:t})},a.prototype.searchForFacetValues=function(e,t){var o=r(336),n=r(375);if(void 0===e.facetName||void 0===e.facetQuery)throw new Error("Usage: index.searchForFacetValues({facetName, facetQuery, ...params}[, callback])");var s=e.facetName,a=n(o(e),(function(e){return"facetName"===e})),i=this.as._getSearchParams(a,"");return this.as._jsonRequest({method:"POST",url:"/1/indexes/"+encodeURIComponent(this.indexName)+"/facets/"+encodeURIComponent(s)+"/query",hostType:"read",body:{params:i},callback:t})},a.prototype.searchFacet=n((function(e,t){return this.searchForFacetValues(e,t)}),s("index.searchFacet(params[, callback])","index.searchForFacetValues(params[, callback])")),a.prototype._search=function(e,t,r,o){return this.as._jsonRequest({cache:this.cache,method:"POST",url:t||"/1/indexes/"+encodeURIComponent(this.indexName)+"/query",body:{params:e},hostType:"read",fallback:{method:"GET",url:"/1/indexes/"+encodeURIComponent(this.indexName),body:{params:e}},callback:r,additionalUA:o})},a.prototype.getObject=function(e,t,r){var o=this;1!==arguments.length&&"function"!=typeof t||(r=t,t=void 0);var n="";if(void 0!==t){n="?attributes=";for(var s=0;s<t.length;++s)0!==s&&(n+=","),n+=t[s]}return this.as._jsonRequest({method:"GET",url:"/1/indexes/"+encodeURIComponent(o.indexName)+"/"+encodeURIComponent(e)+n,hostType:"read",callback:r})},a.prototype.getObjects=function(e,t,o){var 
n=r(343),s=r(344),a="Usage: index.getObjects(arrayOfObjectIDs[, callback])";if(!n(e))throw new Error(a);var i=this;1!==arguments.length&&"function"!=typeof t||(o=t,t=void 0);var c={requests:s(e,(function(e){var r={indexName:i.indexName,objectID:e};return t&&(r.attributesToRetrieve=t.join(",")),r}))};return this.as._jsonRequest({method:"POST",url:"/1/indexes/*/objects",hostType:"read",body:c,callback:o})},a.prototype.as=null,a.prototype.indexName=null,a.prototype.typeAheadArgs=null,a.prototype.typeAheadValueOption=null},423:function(e,t){e.exports=function(e,t){var r=!1;return function(){return r||(console.warn(t),r=!0),e.apply(this,arguments)}}},424:function(e,t){e.exports=function(e,t){var r=e.toLowerCase().replace(/[\.\(\)]/g,"");return"algoliasearch: `"+e+"` was replaced by `"+t+"`. Please see https://github.com/algolia/algoliasearch-client-javascript/wiki/Deprecated#"+r}},425:function(e,t,r){var o=r(335);e.exports=function e(t){var r=Array.prototype.slice.call(arguments);return o(r,(function(r){for(var o in r)r.hasOwnProperty(o)&&("object"==typeof t[o]&&"object"==typeof r[o]?t[o]=e({},t[o],r[o]):void 0!==r[o]&&(t[o]=r[o]))})),t}},426:function(e,t,r){"use strict";var o=Array.prototype.slice,n=r(376),s=Object.keys,a=s?function(e){return s(e)}:r(427),i=Object.keys;a.shim=function(){Object.keys?function(){var e=Object.keys(arguments);return e&&e.length===arguments.length}(1,2)||(Object.keys=function(e){return n(e)?i(o.call(e)):i(e)}):Object.keys=a;return Object.keys||a},e.exports=a},427:function(e,t,r){"use strict";var o;if(!Object.keys){var n=Object.prototype.hasOwnProperty,s=Object.prototype.toString,a=r(376),i=Object.prototype.propertyIsEnumerable,c=!i.call({toString:null},"toString"),u=i.call((function(){}),"prototype"),l=["toString","toLocaleString","valueOf","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","constructor"],p=function(e){var t=e.constructor;return 
t&&t.prototype===e},h={$applicationCache:!0,$console:!0,$external:!0,$frame:!0,$frameElement:!0,$frames:!0,$innerHeight:!0,$innerWidth:!0,$onmozfullscreenchange:!0,$onmozfullscreenerror:!0,$outerHeight:!0,$outerWidth:!0,$pageXOffset:!0,$pageYOffset:!0,$parent:!0,$scrollLeft:!0,$scrollTop:!0,$scrollX:!0,$scrollY:!0,$self:!0,$webkitIndexedDB:!0,$webkitStorageInfo:!0,$window:!0},f=function(){if("undefined"==typeof window)return!1;for(var e in window)try{if(!h["$"+e]&&n.call(window,e)&&null!==window[e]&&"object"==typeof window[e])try{p(window[e])}catch(t){return!0}}catch(t){return!0}return!1}();o=function(e){var t=null!==e&&"object"==typeof e,r="[object Function]"===s.call(e),o=a(e),i=t&&"[object String]"===s.call(e),h=[];if(!t&&!r&&!o)throw new TypeError("Object.keys called on a non-object");var d=u&&r;if(i&&e.length>0&&!n.call(e,0))for(var y=0;y<e.length;++y)h.push(String(y));if(o&&e.length>0)for(var m=0;m<e.length;++m)h.push(String(m));else for(var g in e)d&&"prototype"===g||!n.call(e,g)||h.push(String(g));if(c)for(var v=function(e){if("undefined"==typeof window||!f)return p(e);try{return p(e)}catch(t){return!1}}(e),b=0;b<l.length;++b)v&&"constructor"===l[b]||!n.call(e,l[b])||h.push(l[b]);return h}}e.exports=o},428:function(e,t,r){(function(t){var o,n=r(345)("algoliasearch:src/hostIndexState.js"),s={state:{},set:function(e,t){return this.state[e]=t,this.state[e]},get:function(e){return this.state[e]||null}},a={set:function(e,r){s.set(e,r);try{var o=JSON.parse(t.localStorage["algoliasearch-client-js"]);return o[e]=r,t.localStorage["algoliasearch-client-js"]=JSON.stringify(o),o[e]}catch(n){return i(e,n)}},get:function(e){try{return JSON.parse(t.localStorage["algoliasearch-client-js"])[e]||null}catch(r){return i(e,r)}}};function i(e,r){return n("localStorage failed with",r),function(){try{t.localStorage.removeItem("algoliasearch-client-js")}catch(e){}}(),(o=s).get(e)}function c(e,t){return 1===arguments.length?o.get(e):o.set(e,t)}function u(){try{return"localStorage"in 
t&&null!==t.localStorage&&(t.localStorage["algoliasearch-client-js"]||t.localStorage.setItem("algoliasearch-client-js",JSON.stringify({})),!0)}catch(e){return!1}}o=u()?a:s,e.exports={get:c,set:c,supportsLocalStorage:u}}).call(this,r(52))},429:function(e,t,r){var o;function n(e){function r(){if(r.enabled){var e=r,n=+new Date,s=n-(o||n);e.diff=s,e.prev=o,e.curr=n,o=n;for(var a=new Array(arguments.length),i=0;i<a.length;i++)a[i]=arguments[i];a[0]=t.coerce(a[0]),"string"!=typeof a[0]&&a.unshift("%O");var c=0;a[0]=a[0].replace(/%([a-zA-Z%])/g,(function(r,o){if("%%"===r)return r;c++;var n=t.formatters[o];if("function"==typeof n){var s=a[c];r=n.call(e,s),a.splice(c,1),c--}return r})),t.formatArgs.call(e,a);var u=r.log||t.log||console.log.bind(console);u.apply(e,a)}}return r.namespace=e,r.enabled=t.enabled(e),r.useColors=t.useColors(),r.color=function(e){var r,o=0;for(r in e)o=(o<<5)-o+e.charCodeAt(r),o|=0;return t.colors[Math.abs(o)%t.colors.length]}(e),"function"==typeof t.init&&t.init(r),r}(t=e.exports=n.debug=n.default=n).coerce=function(e){return e instanceof Error?e.stack||e.message:e},t.disable=function(){t.enable("")},t.enable=function(e){t.save(e),t.names=[],t.skips=[];for(var r=("string"==typeof e?e:"").split(/[\s,]+/),o=r.length,n=0;n<o;n++)r[n]&&("-"===(e=r[n].replace(/\*/g,".*?"))[0]?t.skips.push(new RegExp("^"+e.substr(1)+"$")):t.names.push(new RegExp("^"+e+"$")))},t.enabled=function(e){var r,o;for(r=0,o=t.skips.length;r<o;r++)if(t.skips[r].test(e))return!1;for(r=0,o=t.names.length;r<o;r++)if(t.names[r].test(e))return!0;return!1},t.humanize=r(430),t.names=[],t.skips=[],t.formatters={}},430:function(e,t){var r=1e3,o=6e4,n=60*o,s=24*n;function a(e,t,r){if(!(e<t))return e<1.5*t?Math.floor(e/t)+" "+r:Math.ceil(e/t)+" "+r+"s"}e.exports=function(e,t){t=t||{};var i,c=typeof e;if("string"===c&&e.length>0)return function(e){if((e=String(e)).length>100)return;var t=/^((?:\d+)?\.?\d+) 
*(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(e);if(!t)return;var a=parseFloat(t[1]);switch((t[2]||"ms").toLowerCase()){case"years":case"year":case"yrs":case"yr":case"y":return 315576e5*a;case"days":case"day":case"d":return a*s;case"hours":case"hour":case"hrs":case"hr":case"h":return a*n;case"minutes":case"minute":case"mins":case"min":case"m":return a*o;case"seconds":case"second":case"secs":case"sec":case"s":return a*r;case"milliseconds":case"millisecond":case"msecs":case"msec":case"ms":return a;default:return}}(e);if("number"===c&&!1===isNaN(e))return t.long?a(i=e,s,"day")||a(i,n,"hour")||a(i,o,"minute")||a(i,r,"second")||i+" ms":function(e){if(e>=s)return Math.round(e/s)+"d";if(e>=n)return Math.round(e/n)+"h";if(e>=o)return Math.round(e/o)+"m";if(e>=r)return Math.round(e/r)+"s";return e+"ms"}(e);throw new Error("val is not a non-empty string or a valid number. val="+JSON.stringify(e))}},431:function(e,t,r){"use strict";var o=r(432),n=o.Promise||r(433).Promise;e.exports=function(e,t){var s=r(373),a=r(342),i=r(434),c=r(435),u=r(436);function l(e,t,o){return(o=r(336)(o||{}))._ua=o._ua||l.ua,new h(e,t,o)}t=t||"",l.version=r(439),l.ua="Algolia for JavaScript ("+l.version+"); "+t,l.initPlaces=u(l),o.__algolia={debug:r(345),algoliasearch:l};var p={hasXMLHttpRequest:"XMLHttpRequest"in o,hasXDomainRequest:"XDomainRequest"in o};function h(){e.apply(this,arguments)}return p.hasXMLHttpRequest&&(p.cors="withCredentials"in new XMLHttpRequest),s(h,e),h.prototype._request=function(e,t){return new n((function(r,o){if(p.cors||p.hasXDomainRequest){e=i(e,t.headers);var n,s,c=t.body,u=p.cors?new XMLHttpRequest:new XDomainRequest,l=!1;n=setTimeout(h,t.timeouts.connect),u.onprogress=function(){l||f()},"onreadystatechange"in u&&(u.onreadystatechange=function(){!l&&u.readyState>1&&f()}),u.onload=function(){if(s)return;var 
e;clearTimeout(n);try{e={body:JSON.parse(u.responseText),responseText:u.responseText,statusCode:u.status,headers:u.getAllResponseHeaders&&u.getAllResponseHeaders()||{}}}catch(t){e=new a.UnparsableJSON({more:u.responseText})}e instanceof a.UnparsableJSON?o(e):r(e)},u.onerror=function(e){if(s)return;clearTimeout(n),o(new a.Network({more:e}))},u instanceof XMLHttpRequest?(u.open(t.method,e,!0),t.forceAuthHeaders&&(u.setRequestHeader("x-algolia-application-id",t.headers["x-algolia-application-id"]),u.setRequestHeader("x-algolia-api-key",t.headers["x-algolia-api-key"]))):u.open(t.method,e),p.cors&&(c&&("POST"===t.method?u.setRequestHeader("content-type","application/x-www-form-urlencoded"):u.setRequestHeader("content-type","application/json")),u.setRequestHeader("accept","application/json")),c?u.send(c):u.send()}else o(new a.Network("CORS not supported"));function h(){s=!0,u.abort(),o(new a.RequestTimeout)}function f(){l=!0,clearTimeout(n),n=setTimeout(h,t.timeouts.complete)}}))},h.prototype._request.fallback=function(e,t){return e=i(e,t.headers),new n((function(r,o){c(e,t,(function(e,t){e?o(e):r(t)}))}))},h.prototype._promise={reject:function(e){return n.reject(e)},resolve:function(e){return n.resolve(e)},delay:function(e){return new n((function(t){setTimeout(t,e)}))},all:function(e){return n.all(e)}},l}},432:function(e,t,r){(function(t){var r;r="undefined"!=typeof window?window:void 0!==t?t:"undefined"!=typeof self?self:{},e.exports=r}).call(this,r(52))},433:function(e,t,r){(function(t,r){var o;o=function(){"use strict";function e(e){return"function"==typeof e}var o=Array.isArray?Array.isArray:function(e){return"[object Array]"===Object.prototype.toString.call(e)},n=0,s=void 0,a=void 0,i=function(e,t){d[n]=e,d[n+1]=t,2===(n+=2)&&(a?a(y):w())},c="undefined"!=typeof window?window:void 0,u=c||{},l=u.MutationObserver||u.WebKitMutationObserver,p="undefined"==typeof self&&void 0!==t&&"[object process]"==={}.toString.call(t),h="undefined"!=typeof 
Uint8ClampedArray&&"undefined"!=typeof importScripts&&"undefined"!=typeof MessageChannel;function f(){var e=setTimeout;return function(){return e(y,1)}}var d=new Array(1e3);function y(){for(var e=0;e<n;e+=2)(0,d[e])(d[e+1]),d[e]=void 0,d[e+1]=void 0;n=0}var m,g,v,b,w=void 0;function _(e,t){var r=this,o=new this.constructor(S);void 0===o[T]&&E(o);var n=r._state;if(n){var s=arguments[n-1];i((function(){return q(n,o,s,r._result)}))}else C(r,o,e,t);return o}function x(e){if(e&&"object"==typeof e&&e.constructor===this)return e;var t=new this(S);return j(t,e),t}p?w=function(){return t.nextTick(y)}:l?(g=0,v=new l(y),b=document.createTextNode(""),v.observe(b,{characterData:!0}),w=function(){b.data=g=++g%2}):h?((m=new MessageChannel).port1.onmessage=y,w=function(){return m.port2.postMessage(0)}):w=void 0===c?function(){try{var e=Function("return this")().require("vertx");return void 0!==(s=e.runOnLoop||e.runOnContext)?function(){s(y)}:f()}catch(t){return f()}}():f();var T=Math.random().toString(36).substring(2);function S(){}function A(t,r,o){r.constructor===t.constructor&&o===_&&r.constructor.resolve===x?function(e,t){1===t._state?k(e,t._result):2===t._state?R(e,t._result):C(t,void 0,(function(t){return j(e,t)}),(function(t){return R(e,t)}))}(t,r):void 0===o?k(t,r):e(o)?function(e,t,r){i((function(e){var o=!1,n=function(e,t,r,o){try{e.call(t,r,o)}catch(n){return n}}(r,t,(function(r){o||(o=!0,t!==r?j(e,r):k(e,r))}),(function(t){o||(o=!0,R(e,t))}),e._label);!o&&n&&(o=!0,R(e,n))}),e)}(t,r,o):k(t,r)}function j(e,t){if(e===t)R(e,new TypeError("You cannot resolve a promise with itself"));else if(n=typeof(o=t),null===o||"object"!==n&&"function"!==n)k(e,t);else{var r=void 0;try{r=t.then}catch(s){return void R(e,s)}A(e,t,r)}var o,n}function | (e){e._onerror&&e._onerror(e._result),I(e)}function k(e,t){void 0===e._state&&(e._result=t,e._state=1,0!==e._subscribers.length&&i(I,e))}function R(e,t){void 0===e._state&&(e._state=2,e._result=t,i(O,e))}function C(e,t,r,o){var 
n=e._subscribers,s=n.length;e._onerror=null,n[s]=t,n[s+1]=r,n[s+2]=o,0===s&&e._state&&i(I,e)}function I(e){var t=e._subscribers,r=e._state;if(0!==t.length){for(var o=void 0,n=void 0,s=e._result,a=0;a<t.length;a+=3)o=t[a],n=t[a+r],o?q(r,o,n,s):n(s);e._subscribers.length=0}}function q(t,r,o,n){var s=e(o),a=void 0,i=void 0,c=!0;if(s){try{a=o(n)}catch(u){c=!1,i=u}if(r===a)return void R(r,new TypeError("A promises callback cannot return that same promise."))}else a=n;void 0!==r._state||(s&&c?j(r,a):!1===c?R(r,i):1===t?k(r,a):2===t&&R(r,a))}var N=0;function E(e){e[T]=N++,e._state=void 0,e._result=void 0,e._subscribers=[]}var P=function(){function e(e,t){this._instanceConstructor=e,this.promise=new e(S),this.promise[T]||E(this.promise),o(t)?(this.length=t.length,this._remaining=t.length,this._result=new Array(this.length),0===this.length?k(this.promise,this._result):(this.length=this.length||0,this._enumerate(t),0===this._remaining&&k(this.promise,this._result))):R(this.promise,new Error("Array Methods must be provided an Array"))}return e.prototype._enumerate=function(e){for(var t=0;void 0===this._state&&t<e.length;t++)this._eachEntry(e[t],t)},e.prototype._eachEntry=function(e,t){var r=this._instanceConstructor,o=r.resolve;if(o===x){var n=void 0,s=void 0,a=!1;try{n=e.then}catch(c){a=!0,s=c}if(n===_&&void 0!==e._state)this._settledAt(e._state,t,e._result);else if("function"!=typeof n)this._remaining--,this._result[t]=e;else if(r===U){var i=new r(S);a?R(i,s):A(i,e,n),this._willSettleAt(i,t)}else this._willSettleAt(new r((function(t){return t(e)})),t)}else this._willSettleAt(o(e),t)},e.prototype._settledAt=function(e,t,r){var o=this.promise;void 0===o._state&&(this._remaining--,2===e?R(o,r):this._result[t]=r),0===this._remaining&&k(o,this._result)},e.prototype._willSettleAt=function(e,t){var r=this;C(e,void 0,(function(e){return r._settledAt(1,t,e)}),(function(e){return r._settledAt(2,t,e)}))},e}(),U=function(){function t(e){this[T]=N++,this._result=this._state=void 
0,this._subscribers=[],S!==e&&("function"!=typeof e&&function(){throw new TypeError("You must pass a resolver function as the first argument to the promise constructor")}(),this instanceof t?function(e,t){try{t((function(t){j(e,t)}),(function(t){R(e,t)}))}catch(r){R(e,r)}}(this,e):function(){throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.")}())}return t.prototype.catch=function(e){return this.then(null,e)},t.prototype.finally=function(t){var r=this.constructor;return e(t)?this.then((function(e){return r.resolve(t()).then((function(){return e}))}),(function(e){return r.resolve(t()).then((function(){throw e}))})):this.then(t,t)},t}();return U.prototype.then=_,U.all=function(e){return new P(this,e).promise},U.race=function(e){var t=this;return o(e)?new t((function(r,o){for(var n=e.length,s=0;s<n;s++)t.resolve(e[s]).then(r,o)})):new t((function(e,t){return t(new TypeError("You must pass an array to race."))}))},U.resolve=x,U.reject=function(e){var t=new this(S);return R(t,e),t},U._setScheduler=function(e){a=e},U._setAsap=function(e){i=e},U._asap=i,U.polyfill=function(){var e=void 0;if(void 0!==r)e=r;else if("undefined"!=typeof self)e=self;else try{e=Function("return this")()}catch(n){throw new Error("polyfill failed because global object is unavailable in this environment")}var t=e.Promise;if(t){var o=null;try{o=Object.prototype.toString.call(t.resolve())}catch(n){}if("[object Promise]"===o&&!t.cast)return}e.Promise=U},U.Promise=U,U},e.exports=o()}).call(this,r(341),r(52))},434:function(e,t,r){"use strict";e.exports=function(e,t){/\?/.test(e)?e+="&":e+="?";return e+o(t)};var o=r(377)},435:function(e,t,r){"use strict";e.exports=function(e,t,r){if("GET"!==t.method)return void r(new Error("Method "+t.method+" "+e+" is not supported by JSONP."));t.debug("JSONP: start");var s=!1,a=!1;n+=1;var 
i=document.getElementsByTagName("head")[0],c=document.createElement("script"),u="algoliaJSONP_"+n,l=!1;window[u]=function(e){!function(){try{delete window[u],delete window[u+"_loaded"]}catch(e){window[u]=window[u+"_loaded"]=void 0}}(),a?t.debug("JSONP: Late answer, ignoring"):(s=!0,f(),r(null,{body:e,responseText:JSON.stringify(e)}))},e+="&callback="+u,t.jsonBody&&t.jsonBody.params&&(e+="&"+t.jsonBody.params);var p=setTimeout((function(){t.debug("JSONP: Script timeout"),a=!0,f(),r(new o.RequestTimeout)}),t.timeouts.complete);function h(){t.debug("JSONP: success"),l||a||(l=!0,s||(t.debug("JSONP: Fail. Script loaded but did not call the callback"),f(),r(new o.JSONPScriptFail)))}function f(){clearTimeout(p),c.onload=null,c.onreadystatechange=null,c.onerror=null,i.removeChild(c)}c.onreadystatechange=function(){"loaded"!==this.readyState&&"complete"!==this.readyState||h()},c.onload=h,c.onerror=function(){t.debug("JSONP: Script error"),l||a||(f(),r(new o.JSONPScriptError))},c.async=!0,c.defer=!0,c.src=e,i.appendChild(c)};var o=r(342),n=0},436:function(e,t,r){e.exports=function(e){return function(t,s,a){var i=r(336);(a=a&&i(a)||{}).hosts=a.hosts||["places-dsn.algolia.net","places-1.algolianet.com","places-2.algolianet.com","places-3.algolianet.com"],0!==arguments.length&&"object"!=typeof t&&void 0!==t||(t="",s="",a._allowEmptyCredentials=!0);var c=e(t,s,a),u=c.initIndex("places");return u.search=n("query","/1/places/query"),u.reverse=function(e,t){var r=o.encode(e);return this.as._jsonRequest({method:"GET",url:"/1/places/reverse?"+r,hostType:"read",callback:t})},u.getObject=function(e,t){return this.as._jsonRequest({method:"GET",url:"/1/places/"+encodeURIComponent(e),hostType:"read",callback:t})},u}};var o=r(437),n=r(374)},437:function(e,t,r){"use strict";t.decode=t.parse=r(438),t.encode=t.stringify=r(377)},438:function(e,t,r){"use strict";function o(e,t){return Object.prototype.hasOwnProperty.call(e,t)}e.exports=function(e,t,r,s){t=t||"&",r=r||"=";var 
a={};if("string"!=typeof e||0===e.length)return a;var i=/\+/g;e=e.split(t);var c=1e3;s&&"number"==typeof s.maxKeys&&(c=s.maxKeys);var u=e.length;c>0&&u>c&&(u=c);for(var l=0;l<u;++l){var p,h,f,d,y=e[l].replace(i,"%20"),m=y.indexOf(r);m>=0?(p=y.substr(0,m),h=y.substr(m+1)):(p=y,h=""),f=decodeURIComponent(p),d=decodeURIComponent(h),o(a,f)?n(a[f])?a[f].push(d):a[f]=[a[f],d]:a[f]=d}return a};var n=Array.isArray||function(e){return"[object Array]"===Object.prototype.toString.call(e)}},439:function(e,t,r){"use strict";e.exports="3.35.1"}}]); | O |
_validators.py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import re
import shlex
from knack.util import CLIError
import azext_alias
from azext_alias.argument import get_placeholders
from azext_alias._const import (
COLLISION_CHECK_LEVEL_DEPTH,
INVALID_ALIAS_COMMAND_ERROR,
EMPTY_ALIAS_ERROR,
INVALID_STARTING_CHAR_ERROR,
INCONSISTENT_ARG_ERROR,
COMMAND_LVL_ERROR
)
from azext_alias.alias import AliasManager
def process_alias_create_namespace(namespace):
    """
    Validate input arguments when the user invokes 'az alias create'.

    Args:
        namespace: argparse namespace object.
    """
    alias_name = namespace.alias_name
    alias_command = namespace.alias_command
    # Validation order matters: name first, then command, then the
    # cross-checks between the two.
    _validate_alias_name(alias_name)
    _validate_alias_command(alias_command)
    _validate_alias_command_level(alias_name, alias_command)
    _validate_pos_args_syntax(alias_name, alias_command)
def _validate_alias_name(alias_name):
    """
    Check if the alias name is valid.

    Args:
        alias_name: The name of the alias to validate.
    """
    if not alias_name:
        raise CLIError(EMPTY_ALIAS_ERROR)
    # An alias must start with a letter.
    if not re.match('^[a-zA-Z]', alias_name):
        raise CLIError(INVALID_STARTING_CHAR_ERROR.format(alias_name[0]))
def _validate_alias_command(alias_command):
    """
    Check if the alias command is valid.

    Args:
        alias_command: The command to validate.
    """
    if not alias_command:
        raise CLIError(EMPTY_ALIAS_ERROR)

    tokens = shlex.split(alias_command)
    # The boundary index is where named arguments or positional arguments
    # begin (first token that does not look like a subcommand, or anything
    # past the collision-check depth).
    boundary_index = len(tokens)
    for position, token in enumerate(tokens):
        if position > COLLISION_CHECK_LEVEL_DEPTH or not re.match('^[a-z]', token.lower()):
            boundary_index = position
            break

    # Extract the possible CLI command prefix and validate it against the
    # reserved command table.
    command_to_validate = ' '.join(tokens[:boundary_index]).lower()
    is_reserved = any(
        re.match(r'([a-z\-]*\s)*{}($|\s)'.format(command_to_validate), reserved_command)
        for reserved_command in azext_alias.cached_reserved_commands
    )
    if not is_reserved:
        raise CLIError(INVALID_ALIAS_COMMAND_ERROR.format(command_to_validate or alias_command))
def _validate_pos_args_syntax(alias_name, alias_command):
    """
    Check if the positional argument syntax is valid in alias name and alias command.

    Args:
        alias_name: The name of the alias to validate.
        alias_command: The command to validate.
    """
    pos_args_from_alias = set(get_placeholders(alias_name))
    # Split by '|' to drop a Jinja filter (e.g. {{ arg_name | upper }}) and
    # by '.' to drop a method call (e.g. {{ arg_name.split()[0] }}).
    pos_args_from_command = set(
        placeholder.split('|')[0].split('.')[0].strip()
        for placeholder in get_placeholders(alias_command)
    )
    if pos_args_from_alias != pos_args_from_command:
        arg_diff = pos_args_from_alias ^ pos_args_from_command
        plural = len(arg_diff) != 1
        raise CLIError(INCONSISTENT_ARG_ERROR.format('s' if plural else '',
                                                     arg_diff,
                                                     'are' if plural else 'is'))
def _validate_alias_command_level(alias, command):
    """
    Make sure that if the alias is a reserved command, the command that the alias points to
    in the command tree does not conflict in levels.

    e.g. 'dns' -> 'network dns' is valid because dns is a level 2 command and network dns
    starts at level 1. However, 'list' -> 'show' is not valid because list and show are
    both reserved commands at level 2.

    Args:
        alias: The name of the alias.
        command: The command that the alias points to.
    """
    alias_collision_table = AliasManager.build_collision_table([alias], azext_alias.cached_reserved_commands)
    if not alias_collision_table:
        # The alias is not a reserved command, so it can point to any command.
        return

    command_collision_table = AliasManager.build_collision_table([command], azext_alias.cached_reserved_commands)
    alias_levels = alias_collision_table.get(alias.split()[0], [])
    command_levels = command_collision_table.get(command.split()[0], [])
    # Any shared level means the alias would shadow the target command.
    if set(alias_levels) & set(command_levels):
        raise CLIError(COMMAND_LVL_ERROR.format(alias, command))
Args: |
test_integers_min_max.js | // Autogenerated from KST: please remove this line if doing any edits by hand! | var assert = require('assert');
var testHelper = require('testHelper');
// Parses src/integers_min_max.bin and asserts the boundary value of every
// integer type/endianness combination the format declares.
// NOTE(review): the 64-bit expectations (18446744073709551615,
// ±9223372036854775807/8) exceed Number.MAX_SAFE_INTEGER, so both sides of
// strictEqual compare as the same rounded double — confirm this matches the
// runtime's 64-bit integer handling.
testHelper('IntegersMinMax', 'src/integers_min_max.bin', function(r, IntegersMinMax) {
  // Unsigned minimums: all zero.
  assert.strictEqual(r.unsignedMin.u1, 0);
  assert.strictEqual(r.unsignedMin.u2le, 0);
  assert.strictEqual(r.unsignedMin.u4le, 0);
  assert.strictEqual(r.unsignedMin.u8le, 0);
  assert.strictEqual(r.unsignedMin.u2be, 0);
  assert.strictEqual(r.unsignedMin.u4be, 0);
  assert.strictEqual(r.unsignedMin.u8be, 0);
  // Unsigned maximums: 2^n - 1 for each width, both endiannesses.
  assert.strictEqual(r.unsignedMax.u1, 255);
  assert.strictEqual(r.unsignedMax.u2le, 65535);
  assert.strictEqual(r.unsignedMax.u4le, 4294967295);
  assert.strictEqual(r.unsignedMax.u8le, 18446744073709551615);
  assert.strictEqual(r.unsignedMax.u2be, 65535);
  assert.strictEqual(r.unsignedMax.u4be, 4294967295);
  assert.strictEqual(r.unsignedMax.u8be, 18446744073709551615);
  // Signed minimums: -2^(n-1).
  assert.strictEqual(r.signedMin.s1, -128);
  assert.strictEqual(r.signedMin.s2le, -32768);
  assert.strictEqual(r.signedMin.s4le, -2147483648);
  assert.strictEqual(r.signedMin.s8le, -9223372036854775808);
  assert.strictEqual(r.signedMin.s2be, -32768);
  assert.strictEqual(r.signedMin.s4be, -2147483648);
  assert.strictEqual(r.signedMin.s8be, -9223372036854775808);
  // Signed maximums: 2^(n-1) - 1.
  assert.strictEqual(r.signedMax.s1, 127);
  assert.strictEqual(r.signedMax.s2le, 32767);
  assert.strictEqual(r.signedMax.s4le, 2147483647);
  assert.strictEqual(r.signedMax.s8le, 9223372036854775807);
  assert.strictEqual(r.signedMax.s2be, 32767);
  assert.strictEqual(r.signedMax.s4be, 2147483647);
  assert.strictEqual(r.signedMax.s8be, 9223372036854775807);
});
test_project.py | # -*- coding: utf-8 -*-
from wakatime.main import execute
from wakatime.packages import requests
from wakatime.packages.requests.models import Response
import logging
import os
import shutil
import tempfile
import time
from testfixtures import log_capture
from wakatime.compat import u, open
from wakatime.constants import API_ERROR, SUCCESS
from wakatime.exceptions import NotYetImplemented
from wakatime.project import generate_project_name
from wakatime.projects.base import BaseProject
from wakatime.projects.git import Git
from .utils import ANY, DynamicIterable, TestCase, TemporaryDirectory, CustomResponse, mock, json
class ProjectTestCase(TestCase):
    """Tests for project/branch detection: git, svn, mercurial, submodules and
    .wakatime-project files."""

    # Attributes patched for every test: outbound HTTP, the offline heartbeat
    # queue, and the session cache.
    # NOTE(review): list entries appear to mean [patch target, return value]
    # while bare strings get a default mock — confirm against tests/utils.TestCase.
    patch_these = [
        'wakatime.packages.requests.adapters.HTTPAdapter.send',
        'wakatime.offlinequeue.Queue.push',
        ['wakatime.offlinequeue.Queue.pop', None],
        ['wakatime.offlinequeue.Queue.connect', None],
        'wakatime.session_cache.SessionCache.save',
        'wakatime.session_cache.SessionCache.delete',
        ['wakatime.session_cache.SessionCache.get', requests.session],
        ['wakatime.session_cache.SessionCache.connect', None],
    ]
    def shared(self, expected_project='', expected_branch=ANY, entity='', config='good_config.cfg', extra_args=[]):
        # Shared driver: send one heartbeat for `entity` and assert the
        # detected project/branch plus the success bookkeeping.
        # NOTE(review): mutable default `extra_args=[]` is safe only because
        # it is never mutated here.
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = CustomResponse()

        config = os.path.join('tests/samples/configs', config)
        # Relative entities are resolved against the samples folder.
        if not os.path.exists(entity):
            entity = os.path.realpath(os.path.join('tests/samples', entity))

        now = u(int(time.time()))
        args = ['--file', entity, '--config', config, '--time', now] + extra_args

        retval = execute(args)
        self.assertEquals(retval, SUCCESS)
        self.assertNothingPrinted()

        heartbeat = {
            'language': ANY,
            'lines': ANY,
            'entity': os.path.realpath(entity),
            'project': expected_project,
            'branch': expected_branch,
            'dependencies': ANY,
            'time': float(now),
            'type': 'file',
            'is_write': False,
            'user_agent': ANY,
        }
        self.assertHeartbeatSent(heartbeat)
        self.assertHeartbeatNotSavedOffline()
        self.assertOfflineHeartbeatsSynced()
        self.assertSessionCacheSaved()
def test_project_base(self):
path = 'tests/samples/codefiles/see.h'
project = BaseProject(path)
with self.assertRaises(NotYetImplemented):
project.process()
with self.assertRaises(NotYetImplemented):
project.name()
with self.assertRaises(NotYetImplemented):
project.branch()
    def test_project_argument_overrides_detected_project(self):
        # --project forces the project name even for a file inside a git fixture.
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        now = u(int(time.time()))
        entity = 'tests/samples/projects/git/emptyfile.txt'
        config = 'tests/samples/configs/good_config.cfg'
        args = ['--project', 'forced-project', '--file', entity, '--config', config, '--time', now]

        execute(args)

        # The failed send pushes the heartbeat to the offline queue, where the
        # resolved project name can be inspected.
        self.assertEquals('forced-project', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
    def test_alternate_project_argument_does_not_override_detected_project(self):
        # --alternate-project is only a fallback: the detected project (this
        # working directory's folder name) still wins.
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        now = u(int(time.time()))
        entity = 'tests/samples/projects/git/emptyfile.txt'
        config = 'tests/samples/configs/good_config.cfg'
        project = os.path.basename(os.path.abspath('.'))  # expected detected project
        args = ['--alternate-project', 'alt-project', '--file', entity, '--config', config, '--time', now]

        execute(args)

        self.assertEquals(project, self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
    def test_alternate_project_argument_does_not_override_project_argument(self):
        # When both flags are present, --project beats --alternate-project.
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        now = u(int(time.time()))
        entity = 'tests/samples/projects/git/emptyfile.txt'
        config = 'tests/samples/configs/good_config.cfg'
        args = ['--project', 'forced-project', '--alternate-project', 'alt-project', '--file', entity, '--config', config, '--time', now]

        execute(args)

        self.assertEquals('forced-project', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
    def test_alternate_project_argument_used_when_project_not_detected(self):
        # The entity is copied into a bare temp dir (no VCS metadata, no
        # .wakatime-project) so project detection finds nothing.
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        tempdir = tempfile.mkdtemp()
        entity = 'tests/samples/projects/git/emptyfile.txt'
        shutil.copy(entity, os.path.join(tempdir, 'emptyfile.txt'))

        now = u(int(time.time()))
        entity = os.path.join(tempdir, 'emptyfile.txt')
        config = 'tests/samples/configs/good_config.cfg'

        args = ['--file', entity, '--config', config, '--time', now]
        execute(args)

        args = ['--file', entity, '--config', config, '--time', now, '--alternate-project', 'alt-project']
        execute(args)

        calls = self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].call_args_list

        # First run: no flag and no detection -> no project in the payload.
        body = calls[0][0][0].body
        data = json.loads(body)[0]
        self.assertEquals(None, data.get('project'))

        # Second run: the alternate project is used as the fallback.
        body = calls[1][0][0].body
        data = json.loads(body)[0]
        self.assertEquals('alt-project', data['project'])
def test_wakatime_project_file(self):
self.shared(
expected_project='waka-project-file',
entity='projects/wakatime_project_file/emptyfile.txt',
)
def test_wakatime_project_file_used_even_when_project_names_hidden(self):
tempdir = tempfile.mkdtemp()
shutil.copytree('tests/samples/projects/wakatime_project_file', os.path.join(tempdir, 'wakatime_project_file'))
self.shared(
expected_project='waka-project-file',
entity=os.path.join(tempdir, 'wakatime_project_file', 'emptyfile.txt'),
extra_args=['--hide-project-names'],
)
def test_git_project_detected(self):
tempdir = tempfile.mkdtemp()
shutil.copytree('tests/samples/projects/git', os.path.join(tempdir, 'git'))
shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
self.shared(
expected_project='git',
expected_branch='master',
entity=os.path.join(tempdir, 'git', 'emptyfile.txt'),
)
    def test_git_project_not_used_when_project_names_hidden(self):
        # With --hide-project-names the real repo name must not leak; the
        # generated name is written to .wakatime-project and reused on the
        # next heartbeat.
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        tempdir = tempfile.mkdtemp()
        shutil.copytree('tests/samples/projects/git', os.path.join(tempdir, 'git'))
        shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))

        now = u(int(time.time()))
        entity = os.path.join(tempdir, 'git', 'emptyfile.txt')
        config = 'tests/samples/configs/good_config.cfg'
        args = ['--hide-project-names', '--file', entity, '--config', config, '--time', now]

        execute(args)

        self.assertHeartbeatSavedOffline()
        # Neither the repo name nor the branch may appear.
        self.assertNotEquals('git', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
        self.assertEquals(None, self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['branch'])

        # The obfuscated name is persisted and stable across runs.
        proj = open(os.path.join(tempdir, 'git', '.wakatime-project')).read()
        self.assertEquals(proj, self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
        execute(args)
        self.assertEquals(proj, self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
def test_git_branch_not_used_when_branch_names_hidden(self):
tempdir = tempfile.mkdtemp()
shutil.copytree('tests/samples/projects/git', os.path.join(tempdir, 'git'))
shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
self.shared(
expected_project='git',
expected_branch=None,
entity=os.path.join(tempdir, 'git', 'emptyfile.txt'),
extra_args=['--hide-branch-names'],
)
    def test_branch_used_when_project_names_hidden_but_branch_names_visible(self):
        # Config enables branch names: hiding the project must not also hide
        # the branch.
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        tempdir = tempfile.mkdtemp()
        shutil.copytree('tests/samples/projects/git', os.path.join(tempdir, 'git'))
        shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))

        now = u(int(time.time()))
        entity = os.path.join(tempdir, 'git', 'emptyfile.txt')
        config = 'tests/samples/configs/show_branch_names.cfg'
        args = ['--hide-project-names', '--file', entity, '--config', config, '--time', now]

        execute(args)

        self.assertHeartbeatSavedOffline()
        self.assertNotEquals('git', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
        self.assertNotEquals(None, self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['branch'])

        # The obfuscated project name is persisted and stable across runs.
        proj = open(os.path.join(tempdir, 'git', '.wakatime-project')).read()
        self.assertEquals(proj, self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
        execute(args)
        self.assertEquals(proj, self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
    @log_capture()
    def test_ioerror_when_reading_git_branch(self, logs):
        # An IOError while reading the git HEAD still reports project 'git'
        # with branch 'master', and the error is logged (not printed).
        logging.disable(logging.NOTSET)

        tempdir = tempfile.mkdtemp()
        shutil.copytree('tests/samples/projects/git', os.path.join(tempdir, 'git'))
        shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))

        entity = os.path.join(tempdir, 'git', 'emptyfile.txt')

        with mock.patch('wakatime.projects.git.open') as mock_open:
            mock_open.side_effect = IOError('')
            self.shared(
                expected_project='git',
                expected_branch='master',
                entity=entity,
            )

        self.assertNothingPrinted()
        actual = self.getLogOutput(logs)
        # Python 3.3+ renamed IOError to OSError in tracebacks.
        expected = 'OSError' if self.isPy33OrNewer else 'IOError'
        self.assertIn(expected, actual)
def test_git_detached_head_not_used_as_branch(self):
tempdir = tempfile.mkdtemp()
shutil.copytree('tests/samples/projects/git-with-detached-head', os.path.join(tempdir, 'git'))
shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
entity = os.path.join(tempdir, 'git', 'emptyfile.txt')
self.shared(
expected_project='git',
expected_branch=None,
entity=entity,
)
    def test_svn_project_detected(self):
        # Git detection is disabled so detection falls through to Subversion;
        # the `svn` subprocess output is faked from a recorded sample.
        with mock.patch('wakatime.projects.git.Git.process') as mock_git:
            mock_git.return_value = False

            with mock.patch('wakatime.projects.subversion.Subversion._has_xcode_tools') as mock_has_xcode:
                mock_has_xcode.return_value = True

                with mock.patch('wakatime.projects.subversion.Popen.communicate') as mock_popen:
                    stdout = open('tests/samples/output/svn').read()
                    stderr = ''
                    # Only the first communicate() call returns the sample.
                    mock_popen.return_value = DynamicIterable((stdout, stderr), max_calls=1)

                    self.shared(
                        expected_project='svn',
                        entity='projects/svn/afolder/emptyfile.txt',
                    )
    @log_capture()
    def test_svn_exception_handled(self, logs):
        # An OSError from launching the `svn` subprocess is swallowed:
        # no project is reported, nothing is printed or logged.
        logging.disable(logging.NOTSET)

        with mock.patch('wakatime.projects.git.Git.process') as mock_git:
            mock_git.return_value = False

            with mock.patch('wakatime.projects.subversion.Popen') as mock_popen:
                mock_popen.side_effect = OSError('')

                with mock.patch('wakatime.projects.subversion.Popen.communicate') as mock_communicate:
                    mock_communicate.side_effect = OSError('')

                    self.shared(
                        expected_project=None,
                        entity='projects/svn/afolder/emptyfile.txt',
                    )

        self.assertNothingPrinted()
        self.assertNothingLogged(logs)
    def test_svn_on_mac_without_xcode_tools_installed(self):
        # On Darwin the Xcode-tools probe raises OSError on its first call
        # (raise_on_calls), so no svn project is reported.
        with mock.patch('wakatime.projects.git.Git.process') as mock_git:
            mock_git.return_value = False

            with mock.patch('wakatime.projects.subversion.platform.system') as mock_system:
                mock_system.return_value = 'Darwin'

                with mock.patch('wakatime.projects.subversion.Popen.communicate') as mock_popen:
                    stdout = open('tests/samples/output/svn').read()
                    stderr = ''
                    mock_popen.return_value = DynamicIterable((stdout, stderr), raise_on_calls=[OSError('')])

                    self.shared(
                        expected_project=None,
                        entity='projects/svn/afolder/emptyfile.txt',
                    )
    def test_svn_on_mac_with_xcode_tools_installed(self):
        # On Darwin the first Popen usage is the Xcode-tools probe
        # (wait() -> 0 means installed) and the second communicate() returns
        # the recorded `svn info` sample, so the project is detected.
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        now = u(int(time.time()))
        entity = 'tests/samples/projects/svn/afolder/emptyfile.txt'
        config = 'tests/samples/configs/good_config.cfg'
        args = ['--file', entity, '--config', config, '--time', now]

        with mock.patch('wakatime.projects.git.Git.process') as mock_git:
            mock_git.return_value = False

            with mock.patch('wakatime.projects.subversion.platform.system') as mock_system:
                mock_system.return_value = 'Darwin'

                with mock.patch('wakatime.projects.subversion.Popen') as mock_popen:
                    stdout = open('tests/samples/output/svn').read()
                    stderr = ''

                    class Dynamic(object):
                        # Popen stand-in: counts communicate() calls so the
                        # probe and the real `svn info` behave differently.
                        def __init__(self):
                            self.called = 0

                        def communicate(self):
                            self.called += 1
                            if self.called == 2:
                                return (stdout, stderr)

                        def wait(self):
                            if self.called == 1:
                                return 0

                    mock_popen.return_value = Dynamic()

                    execute(args)

                    self.assertEquals('svn', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
    def test_mercurial_project_detected(self):
        # With git disabled, the hg fixture yields project 'hg' and the
        # branch name recorded in the fixture.
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        with mock.patch('wakatime.projects.git.Git.process') as mock_git:
            mock_git.return_value = False

            now = u(int(time.time()))
            entity = 'tests/samples/projects/hg/emptyfile.txt'
            config = 'tests/samples/configs/good_config.cfg'
            args = ['--file', entity, '--config', config, '--time', now]

            execute(args)

            self.assertEquals('hg', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
            self.assertEquals('test-hg-branch', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['branch'])
    def test_mercurial_project_branch_with_slash_detected(self):
        # Branch names containing '/' must be passed through untouched.
        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        with mock.patch('wakatime.projects.git.Git.process') as mock_git:
            mock_git.return_value = False

            now = u(int(time.time()))
            entity = 'tests/samples/projects/hg-branch-with-slash/emptyfile.txt'
            config = 'tests/samples/configs/good_config.cfg'
            args = ['--file', entity, '--config', config, '--time', now]

            execute(args)

            self.assertEquals('hg-branch-with-slash', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
            self.assertEquals('branch/with/slash', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['branch'])
    @log_capture()
    def test_ioerror_when_reading_mercurial_branch(self, logs):
        # IOError while reading the hg branch file falls back to branch
        # 'default' and logs the failure.
        logging.disable(logging.NOTSET)

        response = Response()
        response.status_code = 0
        self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response

        with mock.patch('wakatime.projects.git.Git.process') as mock_git:
            mock_git.return_value = False

            now = u(int(time.time()))
            entity = 'tests/samples/projects/hg/emptyfile.txt'
            config = 'tests/samples/configs/good_config.cfg'
            args = ['--file', entity, '--config', config, '--time', now]

            with mock.patch('wakatime.projects.mercurial.open') as mock_open:
                mock_open.side_effect = IOError('')
                execute(args)

                self.assertEquals('hg', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
                self.assertEquals('default', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['branch'])

        self.assertNothingPrinted()
        actual = self.getLogOutput(logs)
        # Python 3.3+ renamed IOError to OSError in tracebacks.
        expected = 'OSError' if self.isPy33OrNewer else 'IOError'
        self.assertIn(expected, actual)
def test_git_submodule_detected(self):
tempdir = tempfile.mkdtemp()
shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
entity = os.path.join(tempdir, 'git', 'asubmodule', 'emptyfile.txt')
self.shared(
expected_project='asubmodule',
expected_branch='asubbranch',
entity=entity,
)
def test_git_submodule_without_option(self):
tempdir = tempfile.mkdtemp()
shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
entity = os.path.join(tempdir, 'git', 'asubmodule', 'emptyfile.txt')
self.shared(
expected_project='asubmodule',
expected_branch='asubbranch',
entity=entity,
config='git-submodules-without-option.cfg',
)
def test_git_submodule_detected_and_enabled_globally(self):
tempdir = tempfile.mkdtemp()
shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
entity = os.path.join(tempdir, 'git', 'asubmodule', 'emptyfile.txt')
self.shared(
expected_project='asubmodule',
expected_branch='asubbranch',
entity=entity,
config='git-submodules-enabled.cfg',
)
def test_git_submodule_detected_but_disabled_globally(self):
tempdir = tempfile.mkdtemp()
shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
entity = os.path.join(tempdir, 'git', 'asubmodule', 'emptyfile.txt')
self.shared(
expected_project='git',
expected_branch='master',
entity=entity,
config='git-submodules-disabled.cfg',
)
def test_git_submodule_detected_and_disabled_using_regex(self):
tempdir = tempfile.mkdtemp()
shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
entity = os.path.join(tempdir, 'git', 'asubmodule', 'emptyfile.txt')
self.shared(
expected_project='git',
expected_branch='master',
entity=entity,
config='git-submodules-disabled-using-regex.cfg',
)
def test_git_submodule_detected_and_enabled_using_regex(self):
    """A regex-based submodule config that keeps this submodule enabled
    reports the submodule's own project/branch."""
    tempdir = tempfile.mkdtemp()
    # Activate both sample repos by renaming dot_git -> .git.
    shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
    shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
    shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
    entity = os.path.join(tempdir, 'git', 'asubmodule', 'emptyfile.txt')
    self.shared(
        expected_project='asubmodule',
        expected_branch='asubbranch',
        entity=entity,
        config='git-submodules-enabled-using-regex.cfg',
    )
@log_capture()
def test_git_submodule_detected_with_invalid_regex(self, logs):
    """An invalid disable-submodules regex logs a warning and the parent
    repo's project/branch are reported instead of the submodule's."""
    logging.disable(logging.NOTSET)
    tempdir = tempfile.mkdtemp()
    # Activate both sample repos by renaming dot_git -> .git.
    shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
    shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
    shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
    entity = os.path.join(tempdir, 'git', 'asubmodule', 'emptyfile.txt')
    self.shared(
        expected_project='git',
        expected_branch='master',
        entity=entity,
        config='git-submodules-invalid-regex.cfg',
    )
    self.assertNothingPrinted()
    actual = self.getLogOutput(logs)
    # Python 3.5+ includes the error position in re's error message.
    expected = u('WakaTime WARNING Regex error (unbalanced parenthesis) for disable git submodules pattern: \\(invalid regex)')
    if self.isPy35OrNewer:
        expected = 'WakaTime WARNING Regex error (unbalanced parenthesis at position 15) for disable git submodules pattern: \\(invalid regex)'
    self.assertEquals(expected, actual)
def test_git_worktree_detected(self):
    """A git worktree reports the main repo's name as project and the
    worktree's checked-out branch."""
    tempdir = tempfile.mkdtemp()
    # Copy the worktree checkout and its main repo, then activate both.
    shutil.copytree('tests/samples/projects/git-worktree', os.path.join(tempdir, 'git-wt'))
    shutil.copytree('tests/samples/projects/git', os.path.join(tempdir, 'git'))
    shutil.move(os.path.join(tempdir, 'git-wt', 'dot_git'), os.path.join(tempdir, 'git-wt', '.git'))
    shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
    entity = os.path.join(tempdir, 'git-wt', 'emptyfile.txt')
    self.shared(
        expected_project='git',
        expected_branch='worktree-detection-branch',
        entity=entity,
    )
def test_git_worktree_not_detected_when_commondir_missing(self):
    """Without the worktree's `commondir` file, no project is detected
    (branch detection still works)."""
    tempdir = tempfile.mkdtemp()
    shutil.copytree('tests/samples/projects/git-worktree', os.path.join(tempdir, 'git-wt'))
    shutil.copytree('tests/samples/projects/git', os.path.join(tempdir, 'git'))
    shutil.move(os.path.join(tempdir, 'git-wt', 'dot_git'), os.path.join(tempdir, 'git-wt', '.git'))
    shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
    # Break the worktree linkage by deleting its commondir file.
    os.remove(os.path.join(tempdir, 'git', '.git', 'worktrees', 'git-worktree', 'commondir'))
    entity = os.path.join(tempdir, 'git-wt', 'emptyfile.txt')
    self.shared(
        expected_project=None,
        expected_branch='worktree-detection-branch',
        entity=entity,
    )
@log_capture()
def test_git_path_from_gitdir_link_file(self, logs):
    """_path_from_gitdir_link_file resolves a submodule's `.git` link file
    to the real git dir under the parent's `.git/modules`."""
    logging.disable(logging.NOTSET)
    tempdir = tempfile.mkdtemp()
    # Activate both sample repos by renaming dot_git -> .git.
    shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
    shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
    shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
    path = os.path.join(tempdir, 'git', 'asubmodule')
    git = Git(None)
    result = git._path_from_gitdir_link_file(path)
    expected = os.path.realpath(os.path.join(tempdir, 'git', '.git', 'modules', 'asubmodule'))
    self.assertEquals(expected, result)
    self.assertNothingPrinted()
    self.assertNothingLogged(logs)
@log_capture()
def test_git_path_from_gitdir_link_file_handles_exceptions(self, logs):
    """_path_from_gitdir_link_file survives read errors.

    A single failed open() still resolves the path and logs nothing; a
    UnicodeDecodeError or a persistent IOError makes it return None and
    log the exception.
    """
    logging.disable(logging.NOTSET)
    tempdir = tempfile.mkdtemp()
    shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
    shutil.move(os.path.join(tempdir, 'git', 'dot_git'), os.path.join(tempdir, 'git', '.git'))
    shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
    # Keep a handle on the real open(); the side effect below delegates to it.
    self.orig_open = open
    self.count = 0
    # Case 1: only the first open() call raises; the path still resolves.
    with mock.patch('wakatime.projects.git.open') as mock_open:
        def side_effect_function(*args, **kwargs):
            self.count += 1
            if self.count <= 1:
                raise IOError('')
            return self.orig_open(*args, **kwargs)
        mock_open.side_effect = side_effect_function
        git = Git(None)
        path = os.path.join(tempdir, 'git', 'asubmodule')
        result = git._path_from_gitdir_link_file(path)
        expected = os.path.realpath(os.path.join(tempdir, 'git', '.git', 'modules', 'asubmodule'))
        self.assertEquals(expected, result)
        self.assertNothingPrinted()
        self.assertNothingLogged(logs)
    # Case 2: undecodable link file -> None, with the error logged.
    with mock.patch('wakatime.projects.git.open') as mock_open:
        mock_open.side_effect = UnicodeDecodeError('utf8', ''.encode('utf8'), 0, 0, '')
        git = Git(None)
        path = os.path.join(tempdir, 'git', 'asubmodule')
        result = git._path_from_gitdir_link_file(path)
        self.assertIsNone(result)
        self.assertNothingPrinted()
        actual = self.getLogOutput(logs)
        expected = 'UnicodeDecodeError'
        self.assertIn(expected, actual)
    # Case 3: persistent IOError -> None; Python 3.3+ aliases IOError to OSError.
    with mock.patch('wakatime.projects.git.open') as mock_open:
        mock_open.side_effect = IOError('')
        git = Git(None)
        path = os.path.join(tempdir, 'git', 'asubmodule')
        result = git._path_from_gitdir_link_file(path)
        self.assertIsNone(result)
        self.assertNothingPrinted()
        actual = self.getLogOutput(logs)
        expected = 'OSError' if self.isPy33OrNewer else 'IOError'
        self.assertIn(expected, actual)
@log_capture()
def test_git_path_from_gitdir_link_file_handles_invalid_link(self, logs):
    """A submodule `.git` link that cannot be resolved yields None.

    Note: only the submodule's dot_git is activated here; the parent repo's
    git dir stays as `dot_git`, so the link target is not usable.
    """
    logging.disable(logging.NOTSET)
    tempdir = tempfile.mkdtemp()
    shutil.copytree('tests/samples/projects/git-with-submodule', os.path.join(tempdir, 'git'))
    shutil.move(os.path.join(tempdir, 'git', 'asubmodule', 'dot_git'), os.path.join(tempdir, 'git', 'asubmodule', '.git'))
    path = os.path.join(tempdir, 'git', 'asubmodule')
    git = Git(None)
    result = git._path_from_gitdir_link_file(path)
    expected = None
    self.assertEquals(expected, result)
    self.assertNothingPrinted()
    self.assertNothingLogged(logs)
def test_git_branch_with_slash(self):
    """Branch names containing '/' are reported verbatim."""
    tempdir = tempfile.mkdtemp()
    project_dir = os.path.join(tempdir, 'git')
    shutil.copytree('tests/samples/projects/git-branch-with-slash', project_dir)
    # Activate the sample repo by renaming dot_git -> .git.
    shutil.move(os.path.join(project_dir, 'dot_git'), os.path.join(project_dir, '.git'))
    self.shared(
        expected_project='git',
        expected_branch='branch/with/slash',
        entity=os.path.join(project_dir, 'emptyfile.txt'),
    )
@log_capture()
def test_project_map(self, logs):
    """A [projectmap] config entry overrides the detected project name."""
    logging.disable(logging.NOTSET)

    resp = Response()
    resp.status_code = 0
    self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = resp

    entity = 'tests/samples/projects/project_map/emptyfile.txt'
    config = 'tests/samples/configs/project_map.cfg'
    execute(['--file', entity, '--config', config, '--time', u(int(time.time()))])

    heartbeat = self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]
    self.assertEquals('proj-map', heartbeat['project'])
    self.assertNothingPrinted()
    self.assertNothingLogged(logs)
@log_capture()
def test_project_map_group_usage(self, logs):
    """The project_map42 sample folder maps to 'proj-map42' via the
    configured projectmap pattern."""
    logging.disable(logging.NOTSET)

    resp = Response()
    resp.status_code = 0
    self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = resp

    entity = 'tests/samples/projects/project_map42/emptyfile.txt'
    config = 'tests/samples/configs/project_map.cfg'
    execute(['--file', entity, '--config', config, '--time', u(int(time.time()))])

    heartbeat = self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]
    self.assertEquals('proj-map42', heartbeat['project'])
    self.assertNothingPrinted()
    self.assertNothingLogged(logs)
@log_capture()
def test_project_map_with_invalid_regex(self, logs):
    """An invalid projectmap regex logs a warning, but the heartbeat still
    succeeds (retval == SUCCESS)."""
    logging.disable(logging.NOTSET)
    self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = CustomResponse()
    now = u(int(time.time()))
    entity = 'tests/samples/projects/project_map42/emptyfile.txt'
    config = 'tests/samples/configs/project_map_invalid.cfg'
    args = ['--file', entity, '--config', config, '--time', now]
    retval = execute(args)
    self.assertEquals(retval, SUCCESS)
    self.assertNothingPrinted()
    actual = self.getLogOutput(logs)
    # Python 3.5+ includes the error position in re's error message.
    expected = u('WakaTime WARNING Regex error (unexpected end of regular expression) for projectmap pattern: invalid[({regex')
    if self.isPy35OrNewer:
        expected = u('WakaTime WARNING Regex error (unterminated character set at position 7) for projectmap pattern: invalid[({regex')
    self.assertEquals(expected, actual)
@log_capture()
def test_project_map_with_replacement_group_index_error(self, logs):
    """A projectmap replacement referencing a missing capture group logs a
    'tuple index out of range' warning; execute returns API_ERROR (the
    mocked response has status 0)."""
    logging.disable(logging.NOTSET)
    response = Response()
    response.status_code = 0
    self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
    now = u(int(time.time()))
    entity = 'tests/samples/projects/project_map42/emptyfile.txt'
    config = 'tests/samples/configs/project_map_malformed.cfg'
    args = ['--file', entity, '--config', config, '--time', now]
    retval = execute(args)
    self.assertEquals(retval, API_ERROR)
    self.assertNothingPrinted()
    actual = self.getLogOutput(logs)
    expected = u('WakaTime WARNING Regex error (tuple index out of range) for projectmap pattern: proj-map{3}')
    self.assertEquals(expected, actual)
@log_capture()
def test_project_map_allows_duplicate_keys(self, logs):
    """With duplicate projectmap keys, the expected entry
    ('proj-map-duplicate-5') wins."""
    logging.disable(logging.NOTSET)

    resp = Response()
    resp.status_code = 0
    self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = resp

    entity = 'tests/samples/projects/project_map/emptyfile.txt'
    config = 'tests/samples/configs/project_map_with_duplicate_keys.cfg'
    execute(['--file', entity, '--config', config, '--time', u(int(time.time()))])

    heartbeat = self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]
    self.assertEquals('proj-map-duplicate-5', heartbeat['project'])
    self.assertNothingPrinted()
    self.assertNothingLogged(logs)
@log_capture()
def test_project_map_allows_colon_in_key(self, logs):
    """projectmap keys containing ':' still match and map the project."""
    logging.disable(logging.NOTSET)

    resp = Response()
    resp.status_code = 0
    self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = resp

    entity = 'tests/samples/projects/project_map/emptyfile.txt'
    config = 'tests/samples/configs/project_map_with_colon_in_key.cfg'
    execute(['--file', entity, '--config', config, '--time', u(int(time.time()))])

    heartbeat = self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]
    self.assertEquals('proj-map-match', heartbeat['project'])
    self.assertNothingPrinted()
    self.assertNothingLogged(logs)
@log_capture()
def test_exclude_unknown_project_when_project_detected(self, logs):
    """exclude_unknown_project must not drop heartbeats whose project is
    known (here supplied explicitly via --project)."""
    logging.disable(logging.NOTSET)
    response = Response()
    response.status_code = 0
    self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
    with TemporaryDirectory() as tempdir:
        # Copy the entity into a temp dir so no project is detectable from
        # its location; the --project argument supplies the name instead.
        entity = 'tests/samples/codefiles/emptyfile.txt'
        shutil.copy(entity, os.path.join(tempdir, 'emptyfile.txt'))
        entity = os.path.realpath(os.path.join(tempdir, 'emptyfile.txt'))
        config = 'tests/samples/configs/exclude_unknown_project.cfg'
        args = ['--file', entity, '--project', 'proj-arg', '--config', config, '--log-file', '~/.wakatime.log']
        execute(args)
        self.assertNothingPrinted()
        self.assertNothingLogged(logs)
        # Heartbeat was queued with the explicit project name.
        self.assertEquals('proj-arg', self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['project'])
def test_generate_project_name(self):
self.assertGreater(len(generate_project_name()), 1)
self.assertNotEqual(generate_project_name(), generate_project_name()) | with mock.patch('wakatime.projects.subversion.Subversion._has_xcode_tools') as mock_has_xcode:
mock_has_xcode.return_value = True
|
d-theme-dark-button-time.ts | /*
* Copyright (C) 2019 Toshiba Corporation
* SPDX-License-Identifier: Apache-2.0
*/
import { DButtonTime, DThemeButtonTime } from "../../d-button-time";
import { DPickerTimes } from "../../d-picker-times";
import { DStateAwareOrValueMightBe } from "../../d-state-aware";
import { DThemeDarkButton } from "./d-theme-dark-button";
/**
 * Formats a date for display on a time button, honoring the button's
 * configured datetime mask.
 */
const formatter = ( date: Date, button: DButtonTime ): string =>
	DPickerTimes.format( date, button.getDatetimeMask() );
export class | extends DThemeDarkButton<Date> implements DThemeButtonTime {
getTextFormatter(): ( value: Date, caller: DButtonTime ) => string {
return formatter;
}
newTextValue(): DStateAwareOrValueMightBe<Date> {
return new Date();
}
}
| DThemeDarkButtonTime |
content.rs | use aho_corasick::{AhoCorasickBuilder, MatchKind};
use lazy_static::lazy_static;
use crate::ast::{NodeData, ScriptOrStyleLang};
use crate::cfg::Cfg;
use crate::common::gen::codepoints::TAG_NAME_CHAR;
use crate::common::pattern::Replacer;
use crate::common::spec::tag::ns::Namespace;
use crate::common::spec::tag::whitespace::{
get_whitespace_minification_for_tag, WhitespaceMinification,
};
use crate::common::whitespace::{collapse_whitespace, is_all_whitespace, left_trim, right_trim};
use crate::entity::encode::encode_entities;
use crate::minify::bang::minify_bang;
use crate::minify::comment::minify_comment;
use crate::minify::css::minify_css;
use crate::minify::doctype::minify_doctype;
use crate::minify::element::minify_element;
use crate::minify::instruction::minify_instruction;
use crate::minify::js::minify_js;
fn | () -> Replacer {
let mut patterns = Vec::<Vec<u8>>::new();
let mut replacements = Vec::<Vec<u8>>::new();
// Replace all `<` with a `<` if it's followed by a TAG_NAME_CHAR, `/`, `!`, or `?`.
for c in 0u8..128u8 {
// TODO Create single lookup.
if TAG_NAME_CHAR[c] || c == b'/' || c == b'!' || c == b'?' {
patterns.push(vec![b'<', c]);
replacements.push(vec![b'&', b'L', b'T', c]);
};
}
Replacer::new(
AhoCorasickBuilder::new()
.dfa(true)
.match_kind(MatchKind::LeftmostLongest)
.build(patterns),
replacements,
)
}
lazy_static! {
static ref CHEVRON_REPLACER: Replacer = build_chevron_replacer();
}
/// Minifies a sequence of child nodes into `out`.
///
/// First pass: applies the whitespace policy for `parent` (collapse /
/// destroy-whole / trim) to text nodes and records, on each element, the
/// name of its next sibling element. Second pass: dispatches every node to
/// its specific minifier.
pub fn minify_content(
    cfg: &Cfg,
    out: &mut Vec<u8>,
    ns: Namespace,
    descendant_of_pre: bool,
    // Use empty slice if none.
    parent: &[u8],
    mut nodes: Vec<NodeData>,
) {
    let &WhitespaceMinification {
        collapse,
        destroy_whole,
        trim,
    } = get_whitespace_minification_for_tag(ns, parent, descendant_of_pre);
    // TODO Document or fix: even though bangs/comments/etc. don't affect layout, we don't collapse/destroy-whole/trim combined text nodes across bangs/comments/etc., as that's too complex and is ambiguous about which nodes should whitespace be deleted from.
    let mut found_first_text_or_elem = false;
    // Both indices stay -1 until such a node has been seen.
    let mut index_of_last_nonempty_text_or_elem: isize = -1;
    let mut index_of_last_text_or_elem: isize = -1;
    for i in 0..nodes.len() {
        // Split so the current node and an earlier one can be borrowed
        // mutably at the same time.
        let (previous_nodes, next_nodes) = nodes.split_at_mut(i);
        let n = &mut next_nodes[0];
        match n {
            NodeData::Element { name, .. } => {
                // Back-fill this element's name into the previous element's
                // `next_sibling_element_name`.
                if index_of_last_nonempty_text_or_elem > -1 {
                    if let NodeData::Element {
                        next_sibling_element_name,
                        ..
                    } = &mut previous_nodes[index_of_last_nonempty_text_or_elem as usize]
                    {
                        debug_assert!(next_sibling_element_name.is_empty());
                        next_sibling_element_name.extend_from_slice(name);
                    };
                };
                found_first_text_or_elem = true;
                index_of_last_nonempty_text_or_elem = i as isize;
                index_of_last_text_or_elem = i as isize;
            }
            NodeData::Text { value } => {
                if !found_first_text_or_elem {
                    // This is the first element or text node, and it's a text node.
                    found_first_text_or_elem = true;
                    if trim {
                        left_trim(value);
                    };
                };
                // Our parser is guaranteed to output contiguous text as a single node,
                // so the adjacent nodes to a text node (not counting comments/bangs/etc.) should be elements.
                // TODO debug_assert this and add tests.
                if destroy_whole && is_all_whitespace(value) {
                    value.clear();
                } else if collapse {
                    collapse_whitespace(value);
                };
                // Set AFTER processing.
                index_of_last_text_or_elem = i as isize;
                if !value.is_empty() {
                    index_of_last_nonempty_text_or_elem = i as isize;
                };
            }
            _ => {}
        };
    }
    // Right-trim the trailing text node, if any.
    if trim && index_of_last_text_or_elem > -1 {
        if let NodeData::Text { value } =
            nodes.get_mut(index_of_last_text_or_elem as usize).unwrap()
        {
            right_trim(value);
        };
    }
    // Second pass: emit each node through its dedicated minifier.
    for (i, c) in nodes.into_iter().enumerate() {
        match c {
            NodeData::Bang { code, ended } => minify_bang(cfg, out, &code, ended),
            NodeData::Comment { code, ended } => minify_comment(cfg, out, &code, ended),
            NodeData::Doctype { legacy, ended } => minify_doctype(cfg, out, &legacy, ended),
            NodeData::Element {
                attributes,
                children,
                closing_tag,
                name,
                namespace: child_ns,
                next_sibling_element_name,
            } => minify_element(
                cfg,
                out,
                descendant_of_pre,
                child_ns,
                parent,
                &next_sibling_element_name,
                (i as isize) == index_of_last_nonempty_text_or_elem,
                &name,
                attributes,
                closing_tag,
                children,
            ),
            NodeData::Instruction { code, ended } => minify_instruction(cfg, out, &code, ended),
            NodeData::ScriptOrStyleContent { code, lang } => match lang {
                ScriptOrStyleLang::CSS => minify_css(cfg, out, &code),
                ScriptOrStyleLang::Data => out.extend_from_slice(&code),
                ScriptOrStyleLang::JS => minify_js(cfg, out, &code),
            },
            NodeData::Text { value } => out
                .extend_from_slice(&CHEVRON_REPLACER.replace_all(&encode_entities(&value, false))),
        };
    }
}
| build_chevron_replacer |
phone.factory.js | 'use strict'; | .factory('Phone', Phone);
// Explicit DI annotation so minification does not break injection.
Phone.$inject = ['$resource'];

/**
 * Phone resource factory: exposes the phones JSON data as an ngResource,
 * with `query` returning the full phone list array.
 * @param {!Object} $resource AngularJS $resource service.
 * @return {!Object} Configured resource for `phones/:phoneId.json`.
 */
function Phone($resource) {
    return $resource('phones/:phoneId.json', {}, {
        query: {method:'GET', params:{phoneId:'phones'}, isArray:true}
    });
}
angular.module('phonecat.core') |
gpio.rs | //! GPIO and pin configuration
use core::marker::PhantomData;
#[cfg(not(feature = "ulp"))]
use esp_idf_sys::*;
#[cfg(feature = "ulp")]
use crate::ulp::sys::*;
use crate::adc;
pub use chip::*;
/// A trait implemented by every pin instance
pub trait Pin: Send {
    type Error;
    /// The GPIO number of this pin.
    fn pin(&self) -> i32;
}
/// A marker trait designating a pin which is capable of
/// operating as an input pin, even if its current mode
/// might be a different one
pub trait InputPin: Pin {}
/// A marker trait designating a pin which is capable of
/// operating as an output pin, even if its current mode
/// might be a different one
pub trait OutputPin: Pin {}
/// Functions available on pins with pull up/down resistors
//
// This is split into a separate trait from OutputPin, because for pins which also connect to
// the RTCIO mux, the pull up/down needs to be set via the RTCIO mux.
pub trait Pull {
    /// Error type returned by the pull-mode setters.
    type Error;
    /// Enable internal pull up resistor, disable pull down
    fn set_pull_up(&mut self) -> Result<&mut Self, Self::Error>;
    /// Enable internal pull down resistor, disable pull up
    fn set_pull_down(&mut self) -> Result<&mut Self, Self::Error>;
    /// Enable internal pull up and down resistors
    fn set_pull_up_down(&mut self) -> Result<&mut Self, Self::Error>;
    /// Disable internal pull up and down resistors
    fn set_floating(&mut self) -> Result<&mut Self, Self::Error>;
}
/// A pin that is also routed through the RTC mux.
pub trait RTCPin: Pin {
    /// The pin's index in the RTC mux.
    fn rtc_pin(&self) -> i32;
}
/// A marker trait designating a pin which is capable of
/// operating as an ADC pin, even if its current mode
/// might be a different one
pub trait ADCPin: Pin {
    fn adc_unit(&self) -> adc_unit_t;
    fn adc_channel(&self) -> adc_channel_t;
}
/// A marker trait designating a pin which is capable of
/// operating as a DAC pin, even if its current mode
/// might be a different one
#[cfg(all(not(esp32c3), not(esp32s3)))]
pub trait DACPin: Pin {
    fn dac_channel(&self) -> dac_channel_t;
}
/// A marker trait designating a pin which is capable of
/// operating as a touch pin, even if its current mode
/// might be a different one
#[cfg(not(esp32c3))]
pub trait TouchPin: Pin {
    fn touch_channel(&self) -> touch_pad_t;
}
// Type-state markers used as the `MODE` parameter of the pin structs.
/// Pin configured as an input.
pub struct Input;
/// Pin configured as an output.
pub struct Output;
/// Pin configured as simultaneous input and output.
pub struct InputOutput;
/// Pin explicitly disabled.
pub struct Disabled;
/// Pin whose configuration is not (yet) tracked by the type system.
pub struct Unknown;
/// Drive strength (values are approximate)
#[cfg(not(feature = "ulp"))]
pub enum DriveStrength {
    I5mA = 0,
    I10mA = 1,
    I20mA = 2,
    I40mA = 3,
}
// Conversion to the raw esp-idf drive-capability constant.
#[cfg(not(feature = "ulp"))]
impl From<DriveStrength> for gpio_drive_cap_t {
    fn from(strength: DriveStrength) -> gpio_drive_cap_t {
        match strength {
            DriveStrength::I5mA => gpio_drive_cap_t_GPIO_DRIVE_CAP_0,
            DriveStrength::I10mA => gpio_drive_cap_t_GPIO_DRIVE_CAP_1,
            DriveStrength::I20mA => gpio_drive_cap_t_GPIO_DRIVE_CAP_2,
            DriveStrength::I40mA => gpio_drive_cap_t_GPIO_DRIVE_CAP_3,
        }
    }
}
// Conversion back from the raw constant; panics on any value outside
// CAP_0..=CAP_3 (a broken-invariant case, not a recoverable error).
#[cfg(not(feature = "ulp"))]
impl From<gpio_drive_cap_t> for DriveStrength {
    #[allow(non_upper_case_globals)]
    fn from(cap: gpio_drive_cap_t) -> DriveStrength {
        match cap {
            gpio_drive_cap_t_GPIO_DRIVE_CAP_0 => DriveStrength::I5mA,
            gpio_drive_cap_t_GPIO_DRIVE_CAP_1 => DriveStrength::I10mA,
            gpio_drive_cap_t_GPIO_DRIVE_CAP_2 => DriveStrength::I20mA,
            gpio_drive_cap_t_GPIO_DRIVE_CAP_3 => DriveStrength::I40mA,
            other => panic!("Unknown GPIO pin drive capability: {}", other),
        }
    }
}
// Implements the level/direction/drive helpers shared by every pin type
// `$pxi`, generic over its `MODE` marker.
macro_rules! impl_base {
    ($pxi:ident) => {
        #[allow(dead_code)]
        impl<MODE> $pxi<MODE>
        where
            MODE: Send,
        {
            // Restores the pin to its default state; a no-op when targeting
            // the ULP coprocessor.
            fn reset(&mut self) -> Result<(), EspError> {
                #[cfg(not(feature = "ulp"))]
                let res = esp_result!(unsafe { gpio_reset_pin(self.pin()) }, ());
                #[cfg(feature = "ulp")]
                let res = Ok(());
                res
            }
            // Reads the current input level of the pin.
            fn get_input_level(&self) -> bool {
                (unsafe { gpio_get_level(self.pin()) } != 0)
            }
            // Reads back the driven output level straight from the GPIO
            // output registers.
            #[cfg(not(feature = "ulp"))]
            fn get_output_level(&self) -> bool {
                let pin = self.pin() as u32;
                #[cfg(esp32c3)]
                let is_set_high = unsafe { (*(GPIO_OUT_REG as *const u32) >> pin) & 0x01 != 0 };
                #[cfg(not(esp32c3))]
                let is_set_high = if pin <= 31 {
                    // GPIO0 - GPIO31
                    unsafe { (*(GPIO_OUT_REG as *const u32) >> pin) & 0x01 != 0 }
                } else {
                    // GPIO32+
                    unsafe { (*(GPIO_OUT1_REG as *const u32) >> (pin - 32)) & 0x01 != 0 }
                };
                is_set_high
            }
            #[cfg(feature = "ulp")]
            fn get_output_level(&self) -> bool {
                (unsafe { gpio_get_output_level(self.pin()) } != 0)
            }
            fn set_output_level(&mut self, on: bool) -> Result<(), EspError> {
                esp_result!(unsafe { gpio_set_level(self.pin(), (on as u8).into()) }, ())
            }
            // Queries the configured drive strength (not available on ULP).
            #[cfg(not(feature = "ulp"))]
            pub fn get_drive_strength(&self) -> Result<DriveStrength, EspError> {
                let mut cap: gpio_drive_cap_t = 0;
                esp!(unsafe { gpio_get_drive_capability(self.pin(), &mut cap as *mut _) })?;
                Ok(cap.into())
            }
            #[cfg(not(feature = "ulp"))]
            pub fn set_drive_strength(&mut self, strength: DriveStrength) -> Result<(), EspError> {
                esp!(unsafe { gpio_set_drive_capability(self.pin(), strength.into()) })?;
                Ok(())
            }
            // The set_* helpers below reconfigure the pin direction; all but
            // set_disabled reset the pin first. `_od` variants are open-drain.
            fn set_disabled(&mut self) -> Result<(), EspError> {
                esp!(unsafe { gpio_set_direction(self.pin(), gpio_mode_t_GPIO_MODE_DISABLE,) })?;
                Ok(())
            }
            fn set_input(&mut self) -> Result<(), EspError> {
                self.reset()?;
                esp!(unsafe { gpio_set_direction(self.pin(), gpio_mode_t_GPIO_MODE_INPUT) })?;
                Ok(())
            }
            fn set_input_output(&mut self) -> Result<(), EspError> {
                self.reset()?;
                esp!(unsafe {
                    gpio_set_direction(self.pin(), gpio_mode_t_GPIO_MODE_INPUT_OUTPUT)
                })?;
                Ok(())
            }
            fn set_input_output_od(&mut self) -> Result<(), EspError> {
                self.reset()?;
                esp!(unsafe {
                    gpio_set_direction(self.pin(), gpio_mode_t_GPIO_MODE_INPUT_OUTPUT_OD)
                })?;
                Ok(())
            }
            fn set_output(&mut self) -> Result<(), EspError> {
                self.reset()?;
                esp!(unsafe { gpio_set_direction(self.pin(), gpio_mode_t_GPIO_MODE_OUTPUT,) })?;
                Ok(())
            }
            fn set_output_od(&mut self) -> Result<(), EspError> {
                self.reset()?;
                esp!(unsafe { gpio_set_direction(self.pin(), gpio_mode_t_GPIO_MODE_OUTPUT_OD,) })?;
                Ok(())
            }
        }
    };
}
// Implements the `Pull` trait for `$pxi<$mode>` by delegating each setter
// to the esp-idf `gpio_set_pull_mode` call.
macro_rules! impl_pull {
    ($pxi:ident: $mode:ident) => {
        impl Pull for $pxi<$mode> {
            type Error = EspError;
            fn set_pull_up(&mut self) -> Result<&mut Self, Self::Error> {
                esp_result!(
                    unsafe { gpio_set_pull_mode(self.pin(), gpio_pull_mode_t_GPIO_PULLUP_ONLY,) },
                    self
                )
            }
            fn set_pull_down(&mut self) -> Result<&mut Self, Self::Error> {
                esp_result!(
                    unsafe { gpio_set_pull_mode(self.pin(), gpio_pull_mode_t_GPIO_PULLDOWN_ONLY,) },
                    self
                )
            }
            fn set_pull_up_down(&mut self) -> Result<&mut Self, Self::Error> {
                esp_result!(
                    unsafe {
                        gpio_set_pull_mode(self.pin(), gpio_pull_mode_t_GPIO_PULLUP_PULLDOWN)
                    },
                    self
                )
            }
            fn set_floating(&mut self) -> Result<&mut Self, Self::Error> {
                esp_result!(
                    unsafe { gpio_set_pull_mode(self.pin(), gpio_pull_mode_t_GPIO_FLOATING,) },
                    self
                )
            }
        }
    };
}
// Declares the concrete pin struct `$pxi` for GPIO number `$pin` and
// implements its constructors, basic mode conversions, `Pin`/`InputPin`
// and the shared helpers.
macro_rules! impl_input_base {
    ($pxi:ident: $pin:expr) => {
        pub struct $pxi<MODE> {
            _mode: PhantomData<MODE>,
        }
        impl<MODE> $pxi<MODE>
        where
            MODE: Send,
        {
            /// # Safety
            ///
            /// Care should be taken not to instantiate a pin which is already used elsewhere
            pub unsafe fn new() -> $pxi<Unknown> {
                $pxi { _mode: PhantomData }
            }
            pub fn into_unknown(self) -> Result<$pxi<Unknown>, EspError> {
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_disabled(mut self) -> Result<$pxi<Disabled>, EspError> {
                self.set_disabled()?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_input(mut self) -> Result<$pxi<Input>, EspError> {
                self.set_input()?;
                Ok($pxi { _mode: PhantomData })
            }
            /// Degrades a concrete pin (e.g. [`Gpio1`]) to a generic pin
            /// struct that can also be used with peripherals.
            pub fn degrade(self) -> GpioPin<MODE> {
                unsafe { GpioPin::new($pin) }
            }
        }
        impl<MODE> Pin for $pxi<MODE>
        where
            MODE: Send,
        {
            type Error = EspError;
            #[inline(always)]
            fn pin(&self) -> i32 {
                $pin
            }
        }
        impl<MODE> InputPin for $pxi<MODE> where MODE: Send {}
        impl_base!($pxi);
        impl_hal_input_pin!($pxi: Input);
    };
}
// Read-only pins: just the input base, nothing else.
#[allow(unused)]
macro_rules! impl_input_only {
    ($pxi:ident: $pin:expr) => {
        impl_input_base!($pxi: $pin);
    };
}
// Read/write pins: input base plus pull resistors, output traits and the
// `into_*` output-mode conversions (`_od` variants are open-drain).
macro_rules! impl_input_output {
    ($pxi:ident: $pin:expr) => {
        impl_input_base!($pxi: $pin);
        impl_pull!($pxi: Input);
        impl_pull!($pxi: InputOutput);
        impl_hal_input_pin!($pxi: InputOutput);
        impl<MODE> OutputPin for $pxi<MODE> where MODE: Send {}
        impl_hal_output_pin!($pxi: InputOutput);
        impl_hal_output_pin!($pxi: Output);
        impl<MODE> $pxi<MODE>
        where
            MODE: Send,
        {
            pub fn into_input_output(mut self) -> Result<$pxi<InputOutput>, EspError> {
                self.set_input_output()?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_input_output_od(mut self) -> Result<$pxi<InputOutput>, EspError> {
                self.set_input_output_od()?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_output(mut self) -> Result<$pxi<Output>, EspError> {
                self.set_output()?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_output_od(mut self) -> Result<$pxi<Output>, EspError> {
                self.set_output_od()?;
                Ok($pxi { _mode: PhantomData })
            }
        }
    };
}
// Implements `RTCPin` when the pin has an RTC mux index (`RTC: n`); the
// `NORTC` arm expands to nothing.
macro_rules! impl_rtc {
    ($pxi:ident: $pin:expr, RTC: $rtc:expr) => {
        impl<MODE> RTCPin for $pxi<MODE>
        where
            MODE: Send,
        {
            fn rtc_pin(&self) -> i32 {
                $rtc
            }
        }
    };
    ($pxi:ident: $pin:expr, NORTC: $rtc:expr) => {};
}
// Implements ADC support for a pin. The `ADC1`/`ADC2` arms add the
// `into_analog_atten_*` mode conversions (each configures the channel's
// attenuation), the `ADCPin` trait, and the embedded-hal 0.2 / 1.0-alpha
// ADC channel traits; the `NOADC` arm expands to nothing.
macro_rules! impl_adc {
    ($pxi:ident: $pin:expr, ADC1: $adc:expr) => {
        #[cfg(not(feature = "ulp"))]
        impl<MODE> $pxi<MODE>
        where
            MODE: Send,
        {
            pub fn into_analog_atten_0db(
                mut self,
            ) -> Result<$pxi<adc::Atten0dB<adc::ADC1>>, EspError> {
                self.reset()?;
                esp!(unsafe { adc1_config_channel_atten($adc, adc_atten_t_ADC_ATTEN_DB_0) })?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_analog_atten_2p5db(
                mut self,
            ) -> Result<$pxi<adc::Atten2p5dB<adc::ADC1>>, EspError> {
                self.reset()?;
                esp!(unsafe { adc1_config_channel_atten($adc, adc_atten_t_ADC_ATTEN_DB_2_5) })?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_analog_atten_6db(
                mut self,
            ) -> Result<$pxi<adc::Atten6dB<adc::ADC1>>, EspError> {
                self.reset()?;
                esp!(unsafe { adc1_config_channel_atten($adc, adc_atten_t_ADC_ATTEN_DB_6) })?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_analog_atten_11db(
                mut self,
            ) -> Result<$pxi<adc::Atten11dB<adc::ADC1>>, EspError> {
                self.reset()?;
                esp!(unsafe { adc1_config_channel_atten($adc, adc_atten_t_ADC_ATTEN_DB_11) })?;
                Ok($pxi { _mode: PhantomData })
            }
        }
        impl<MODE> ADCPin for $pxi<MODE>
        where
            MODE: Send,
        {
            fn adc_unit(&self) -> adc_unit_t {
                adc_unit_t_ADC_UNIT_1
            }
            fn adc_channel(&self) -> adc_channel_t {
                $adc
            }
        }
        impl<AN> embedded_hal_0_2::adc::Channel<AN> for $pxi<AN>
        where
            AN: adc::Analog<adc::ADC1> + Send,
        {
            type ID = u8;
            fn channel() -> Self::ID {
                $adc
            }
        }
        impl<AN> embedded_hal::adc::nb::Channel<AN> for $pxi<AN>
        where
            AN: adc::Analog<adc::ADC1> + Send,
        {
            type ID = u8;
            fn channel(&self) -> Self::ID {
                $adc
            }
        }
    };
    ($pxi:ident: $pin:expr, ADC2: $adc:expr) => {
        #[cfg(not(feature = "ulp"))]
        impl<MODE> $pxi<MODE>
        where
            MODE: Send,
        {
            pub fn into_analog_atten_0db(
                mut self,
            ) -> Result<$pxi<adc::Atten0dB<adc::ADC2>>, EspError> {
                self.reset()?;
                esp!(unsafe { adc2_config_channel_atten($adc, adc_atten_t_ADC_ATTEN_DB_0) })?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_analog_atten_2p5db(
                mut self,
            ) -> Result<$pxi<adc::Atten2p5dB<adc::ADC2>>, EspError> {
                self.reset()?;
                esp!(unsafe { adc2_config_channel_atten($adc, adc_atten_t_ADC_ATTEN_DB_2_5) })?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_analog_atten_6db(
                mut self,
            ) -> Result<$pxi<adc::Atten6dB<adc::ADC2>>, EspError> {
                self.reset()?;
                esp!(unsafe { adc2_config_channel_atten($adc, adc_atten_t_ADC_ATTEN_DB_6) })?;
                Ok($pxi { _mode: PhantomData })
            }
            pub fn into_analog_atten_11db(
                mut self,
            ) -> Result<$pxi<adc::Atten11dB<adc::ADC2>>, EspError> {
                self.reset()?;
                esp!(unsafe { adc2_config_channel_atten($adc, adc_atten_t_ADC_ATTEN_DB_11) })?;
                Ok($pxi { _mode: PhantomData })
            }
        }
        impl<MODE> ADCPin for $pxi<MODE>
        where
            MODE: Send,
        {
            fn adc_unit(&self) -> adc_unit_t {
                adc_unit_t_ADC_UNIT_2
            }
            fn adc_channel(&self) -> adc_channel_t {
                $adc
            }
        }
        impl<AN> embedded_hal_0_2::adc::Channel<AN> for $pxi<AN>
        where
            AN: adc::Analog<adc::ADC2> + Send,
        {
            type ID = u8;
            fn channel() -> Self::ID {
                $adc as u8
            }
        }
        impl<AN> embedded_hal::adc::nb::Channel<AN> for $pxi<AN>
        where
            AN: adc::Analog<adc::ADC2> + Send,
        {
            type ID = u8;
            fn channel(&self) -> Self::ID {
                // Bugfix: this previously returned the ADC *unit* id
                // (`adc_unit_t_ADC_UNIT_2 as u8`) instead of the channel
                // number, diverging from the ADC1 arm and from the 0.2
                // `Channel` impl directly above.
                $adc as u8
            }
        }
    };
    ($pxi:ident: $pin:expr, NOADC: $adc:expr) => {};
}
// Implements `DACPin` when the pin has a DAC channel; `NODAC` expands to nothing.
macro_rules! impl_dac {
    ($pxi:ident: $pin:expr, DAC: $dac:expr) => {
        #[cfg(all(not(esp32c3), not(esp32s3)))]
        impl<MODE> DACPin for $pxi<MODE>
        where
            MODE: Send,
        {
            fn dac_channel(&self) -> dac_channel_t {
                $dac
            }
        }
    };
    ($pxi:ident: $pin:expr, NODAC: $dac:expr) => {};
}
// Implements `TouchPin` when the pin has a touch channel; `NOTOUCH` expands to nothing.
macro_rules! impl_touch {
    ($pxi:ident: $pin:expr, TOUCH: $touch:expr) => {
        #[cfg(not(esp32c3))]
        impl<MODE> TouchPin for $pxi<MODE>
        where
            MODE: Send,
        {
            fn touch_channel(&self) -> touch_pad_t {
                $touch
            }
        }
    };
    ($pxi:ident: $pin:expr, NOTOUCH: $touch:expr) => {};
}
// Implements the embedded-hal 0.2 and 1.0-alpha `InputPin` traits for
// `$pxi<$mode>`; both read the live input level.
macro_rules! impl_hal_input_pin {
    ($pxi:ident: $mode:ident) => {
        impl embedded_hal_0_2::digital::v2::InputPin for $pxi<$mode> {
            type Error = EspError;
            fn is_high(&self) -> Result<bool, Self::Error> {
                Ok(self.get_input_level())
            }
            fn is_low(&self) -> Result<bool, Self::Error> {
                Ok(!self.get_input_level())
            }
        }
        impl embedded_hal::digital::blocking::InputPin for $pxi<$mode> {
            type Error = EspError;
            fn is_high(&self) -> Result<bool, Self::Error> {
                Ok(self.get_input_level())
            }
            fn is_low(&self) -> Result<bool, Self::Error> {
                Ok(!self.get_input_level())
            }
        }
    };
}
// Implements the embedded-hal 0.2 and 1.0-alpha output-pin traits
// (`OutputPin`, `StatefulOutputPin`, `ToggleableOutputPin`) for
// `$pxi<$mode>`; state reads come from `get_output_level`.
macro_rules! impl_hal_output_pin {
    ($pxi:ident: $mode:ident) => {
        impl embedded_hal_0_2::digital::v2::OutputPin for $pxi<$mode> {
            type Error = EspError;
            fn set_high(&mut self) -> Result<(), Self::Error> {
                self.set_output_level(true)
            }
            fn set_low(&mut self) -> Result<(), Self::Error> {
                self.set_output_level(false)
            }
        }
        impl embedded_hal::digital::blocking::OutputPin for $pxi<$mode> {
            type Error = EspError;
            fn set_high(&mut self) -> Result<(), Self::Error> {
                self.set_output_level(true)
            }
            fn set_low(&mut self) -> Result<(), Self::Error> {
                self.set_output_level(false)
            }
        }
        impl embedded_hal::digital::blocking::StatefulOutputPin for $pxi<$mode> {
            fn is_set_high(&self) -> Result<bool, Self::Error> {
                Ok(self.get_output_level())
            }
            fn is_set_low(&self) -> Result<bool, Self::Error> {
                Ok(!self.get_output_level())
            }
        }
        impl embedded_hal_0_2::digital::v2::StatefulOutputPin for $pxi<$mode> {
            fn is_set_high(&self) -> Result<bool, Self::Error> {
                Ok(self.get_output_level())
            }
            fn is_set_low(&self) -> Result<bool, Self::Error> {
                Ok(!self.get_output_level())
            }
        }
        impl embedded_hal_0_2::digital::v2::ToggleableOutputPin for $pxi<$mode> {
            type Error = EspError;
            fn toggle(&mut self) -> Result<(), Self::Error> {
                self.set_output_level(!self.get_output_level())
            }
        }
        impl embedded_hal::digital::blocking::ToggleableOutputPin for $pxi<$mode> {
            type Error = EspError;
            fn toggle(&mut self) -> Result<(), Self::Error> {
                self.set_output_level(!self.get_output_level())
            }
        }
    };
}
// Top-level pin declaration macro: `Input` pins get read-only support,
// `IO` pins get full input/output support; the trailing arguments select
// the optional RTC/ADC/DAC/touch capabilities for the GPIO.
macro_rules! pin {
    ($pxi:ident: $pin:expr, Input, $rtc:ident: $rtcno:expr, $adc:ident: $adcno:expr, $dac:ident: $dacno:expr, $touch:ident: $touchno:expr) => {
        impl_input_only!($pxi: $pin);
        impl_rtc!($pxi: $pin, $rtc: $rtcno);
        impl_adc!($pxi: $pin, $adc: $adcno);
        impl_dac!($pxi: $pin, $dac: $dacno);
        impl_touch!($pxi: $pin, $touch: $touchno);
    };
    ($pxi:ident: $pin:expr, IO, $rtc:ident: $rtcno:expr, $adc:ident: $adcno:expr, $dac:ident: $dacno:expr, $touch:ident: $touchno:expr) => {
        impl_input_output!($pxi: $pin);
        impl_rtc!($pxi: $pin, $rtc: $rtcno);
        impl_adc!($pxi: $pin, $adc: $adcno);
        impl_dac!($pxi: $pin, $dac: $dacno);
        impl_touch!($pxi: $pin, $touch: $touchno);
    };
}
/// Generic $GpioX pin
pub struct GpioPin<MODE> {
pin: i32,
_mode: PhantomData<MODE>,
}
impl<MODE> GpioPin<MODE>
where
MODE: Send,
{
/// # Safety
///
/// Care should be taken not to instantiate this Pin, if it is
/// already instantiated and used elsewhere, or if it is not set
/// already in the mode of operation which is being instantiated
pub unsafe fn new(pin: i32) -> GpioPin<MODE> {
Self {
pin,
_mode: PhantomData,
}
}
}
impl<MODE> Pin for GpioPin<MODE>
where
MODE: Send,
{
type Error = EspError;
fn pin(&self) -> i32
where
Self: Sized,
{
self.pin
}
}
impl InputPin for GpioPin<Input> {}
impl OutputPin for GpioPin<Output> {}
impl InputPin for GpioPin<InputOutput> {}
impl OutputPin for GpioPin<InputOutput> {}
impl_base!(GpioPin);
impl_hal_input_pin!(GpioPin: Input);
impl_hal_input_pin!(GpioPin: InputOutput);
impl_hal_output_pin!(GpioPin: InputOutput);
impl_hal_output_pin!(GpioPin: Output);
#[cfg(esp32)]
mod chip {
use core::marker::PhantomData;
#[cfg(not(feature = "ulp"))]
use esp_idf_sys::*;
use super::*;
#[cfg(feature = "ulp")]
use crate::ulp::sys::*;
// NOTE: Gpio26 - Gpio32 are used by SPI0/SPI1 for external PSRAM/SPI Flash and
// are not recommended for other uses
pin!(Gpio0:0, IO, RTC:11, ADC2:1, NODAC:0, TOUCH:1);
#[cfg(not(feature = "ulp"))]
pin!(Gpio1:1, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio2:2, IO, RTC:12, ADC2:2, NODAC:0, TOUCH:2);
#[cfg(not(feature = "ulp"))]
pin!(Gpio3:3, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio4:4, IO, RTC:10, ADC2:0, NODAC:0, TOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio5:5, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio6:6, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio7:7, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio8:8, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio9:9, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio10:10, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio11:11, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio12:12, IO, RTC:15, ADC2:5, NODAC:0, TOUCH:5);
pin!(Gpio13:13, IO, RTC:14, ADC2:4, NODAC:0, TOUCH:4);
pin!(Gpio14:14, IO, RTC:16, ADC2:6, NODAC:0, TOUCH:6);
pin!(Gpio15:15, IO, RTC:13, ADC2:3, NODAC:0, TOUCH:3);
#[cfg(not(feature = "ulp"))]
pin!(Gpio16:16, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio17:17, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio18:18, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio19:19, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio21:21, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio22:22, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio23:23, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio25:25, IO, RTC:6, ADC2:8, DAC:1, NOTOUCH:0);
pin!(Gpio26:26, IO, RTC:7, ADC2:9, DAC:2, NOTOUCH:0);
pin!(Gpio27:27, IO, RTC:17, ADC2:7, NODAC:0, TOUCH:7);
pin!(Gpio32:32, IO, RTC:9, ADC1:4, NODAC:0, TOUCH:9);
pin!(Gpio33:33, IO, RTC:8, ADC1:5, NODAC:0, TOUCH:8);
pin!(Gpio34:34, Input, RTC:4, ADC1:6, NODAC:0, NOTOUCH:0);
pin!(Gpio35:35, Input, RTC:5, ADC1:7, NODAC:0, NOTOUCH:0);
pin!(Gpio36:36, Input, RTC:0, ADC1:0, NODAC:0, NOTOUCH:0);
pin!(Gpio37:37, Input, RTC:1, ADC1:1, NODAC:0, NOTOUCH:0);
pin!(Gpio38:38, Input, RTC:2, ADC1:2, NODAC:0, NOTOUCH:0);
pin!(Gpio39:39, Input, RTC:3, ADC1:3, NODAC:0, NOTOUCH:0);
pub struct Pins {
pub gpio0: Gpio0<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio1: Gpio1<Unknown>,
pub gpio2: Gpio2<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio3: Gpio3<Unknown>,
pub gpio4: Gpio4<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio5: Gpio5<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio6: Gpio6<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio7: Gpio7<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio8: Gpio8<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio9: Gpio9<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio10: Gpio10<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio11: Gpio11<Unknown>,
pub gpio12: Gpio12<Unknown>,
pub gpio13: Gpio13<Unknown>,
pub gpio14: Gpio14<Unknown>,
pub gpio15: Gpio15<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio16: Gpio16<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio17: Gpio17<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio18: Gpio18<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio19: Gpio19<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio21: Gpio21<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio22: Gpio22<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio23: Gpio23<Unknown>,
pub gpio25: Gpio25<Unknown>,
pub gpio26: Gpio26<Unknown>,
pub gpio27: Gpio27<Unknown>,
pub gpio32: Gpio32<Unknown>,
pub gpio33: Gpio33<Unknown>,
pub gpio34: Gpio34<Unknown>,
pub gpio35: Gpio35<Unknown>,
pub gpio36: Gpio36<Unknown>,
pub gpio37: Gpio37<Unknown>,
pub gpio38: Gpio38<Unknown>,
pub gpio39: Gpio39<Unknown>,
}
impl Pins {
/// # Safety
///
/// Care should be taken not to instantiate the Pins structure, if it is
/// already instantiated and used elsewhere
pub unsafe fn new() -> Self {
Self {
gpio0: Gpio0::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio1: Gpio1::<Unknown>::new(),
gpio2: Gpio2::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio3: Gpio3::<Unknown>::new(),
gpio4: Gpio4::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio5: Gpio5::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio6: Gpio6::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio7: Gpio7::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio8: Gpio8::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio9: Gpio9::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio10: Gpio10::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio11: Gpio11::<Unknown>::new(),
gpio12: Gpio12::<Unknown>::new(),
gpio13: Gpio13::<Unknown>::new(),
gpio14: Gpio14::<Unknown>::new(),
gpio15: Gpio15::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio16: Gpio16::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio17: Gpio17::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio18: Gpio18::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio19: Gpio19::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio21: Gpio21::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio22: Gpio22::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio23: Gpio23::<Unknown>::new(),
gpio25: Gpio25::<Unknown>::new(),
gpio26: Gpio26::<Unknown>::new(),
gpio27: Gpio27::<Unknown>::new(),
gpio32: Gpio32::<Unknown>::new(),
gpio33: Gpio33::<Unknown>::new(),
gpio34: Gpio34::<Unknown>::new(),
gpio35: Gpio35::<Unknown>::new(),
gpio36: Gpio36::<Unknown>::new(),
gpio37: Gpio37::<Unknown>::new(),
gpio38: Gpio38::<Unknown>::new(),
gpio39: Gpio39::<Unknown>::new(),
}
}
}
}
#[cfg(any(esp32s2, esp32s3))]
mod chip {
use core::marker::PhantomData;
#[cfg(not(feature = "ulp"))]
use esp_idf_sys::*;
use super::*;
// NOTE: Gpio26 - Gpio32 (and Gpio33 - Gpio37 if using Octal RAM/Flash) are used
// by SPI0/SPI1 for external PSRAM/SPI Flash and are not recommended for
// other uses
pin!(Gpio0:0, IO, RTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio1:1, IO, RTC:1, ADC1:0, NODAC:0, TOUCH:1);
pin!(Gpio2:2, IO, RTC:2, ADC1:1, NODAC:0, TOUCH:2);
pin!(Gpio3:3, IO, RTC:3, ADC1:2, NODAC:0, TOUCH:3);
pin!(Gpio4:4, IO, RTC:4, ADC1:3, NODAC:0, TOUCH:4);
pin!(Gpio5:5, IO, RTC:5, ADC1:4, NODAC:0, TOUCH:5);
pin!(Gpio6:6, IO, RTC:6, ADC1:5, NODAC:0, TOUCH:6);
pin!(Gpio7:7, IO, RTC:7, ADC1:6, NODAC:0, TOUCH:7);
pin!(Gpio8:8, IO, RTC:8, ADC1:7, NODAC:0, TOUCH:8);
pin!(Gpio9:9, IO, RTC:9, ADC1:8, NODAC:0, TOUCH:9);
pin!(Gpio10:10, IO, RTC:10, ADC1:9, NODAC:0, TOUCH:10);
pin!(Gpio11:11, IO, RTC:11, ADC2:0, NODAC:0, TOUCH:11);
pin!(Gpio12:12, IO, RTC:12, ADC2:1, NODAC:0, TOUCH:12);
pin!(Gpio13:13, IO, RTC:13, ADC2:2, NODAC:0, TOUCH:13);
pin!(Gpio14:14, IO, RTC:14, ADC2:3, NODAC:0, TOUCH:14);
pin!(Gpio15:15, IO, RTC:15, ADC2:4, NODAC:0, NOTOUCH:0);
pin!(Gpio16:16, IO, RTC:16, ADC2:5, NODAC:0, NOTOUCH:0);
#[cfg(esp32s2)]
pin!(Gpio17:17, IO, RTC:17, ADC2:6, DAC:1, NOTOUCH:0);
#[cfg(esp32s3)]
pin!(Gpio17:17, IO, RTC:17, ADC2:6, NODAC:0, NOTOUCH:0);
#[cfg(esp32s2)]
pin!(Gpio18:18, IO, RTC:18, ADC2:7, DAC:2, NOTOUCH:0);
#[cfg(esp32s3)]
pin!(Gpio18:18, IO, RTC:18, ADC2:7, NODAC:0, NOTOUCH:0);
pin!(Gpio19:19, IO, RTC:19, ADC2:8, NODAC:0, NOTOUCH:0);
pin!(Gpio20:20, IO, RTC:20, ADC2:9, NODAC:0, NOTOUCH:0);
pin!(Gpio21:21, IO, RTC:21, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio26:26, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio27:27, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio28:28, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio29:29, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio30:30, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio31:31, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio32:32, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio33:33, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio34:34, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio35:35, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio36:36, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio37:37, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio38:38, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio39:39, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio40:40, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio41:41, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio42:42, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio43:43, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio44:44, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(not(feature = "ulp"))]
pin!(Gpio45:45, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(all(esp32s2, not(feature = "ulp")))]
pin!(Gpio46:46, Input, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(all(esp32s3, not(feature = "ulp")))]
pin!(Gpio46:46, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(all(esp32s3, not(feature = "ulp")))]
pin!(Gpio47:47, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
#[cfg(all(esp32s3, not(feature = "ulp")))]
pin!(Gpio48:48, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pub struct Pins {
pub gpio0: Gpio0<Unknown>,
pub gpio1: Gpio1<Unknown>,
pub gpio2: Gpio2<Unknown>,
pub gpio3: Gpio3<Unknown>,
pub gpio4: Gpio4<Unknown>,
pub gpio5: Gpio5<Unknown>,
pub gpio6: Gpio6<Unknown>,
pub gpio7: Gpio7<Unknown>,
pub gpio8: Gpio8<Unknown>,
pub gpio9: Gpio9<Unknown>,
pub gpio10: Gpio10<Unknown>,
pub gpio11: Gpio11<Unknown>,
pub gpio12: Gpio12<Unknown>,
pub gpio13: Gpio13<Unknown>,
pub gpio14: Gpio14<Unknown>,
pub gpio15: Gpio15<Unknown>,
pub gpio16: Gpio16<Unknown>,
pub gpio17: Gpio17<Unknown>,
pub gpio18: Gpio18<Unknown>,
pub gpio19: Gpio19<Unknown>,
pub gpio20: Gpio20<Unknown>,
pub gpio21: Gpio21<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio26: Gpio26<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio27: Gpio27<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio28: Gpio28<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio29: Gpio29<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio30: Gpio30<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio31: Gpio31<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio32: Gpio32<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio33: Gpio33<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio34: Gpio34<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio35: Gpio35<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio36: Gpio36<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio37: Gpio37<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio38: Gpio38<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio39: Gpio39<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio40: Gpio40<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio41: Gpio41<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio42: Gpio42<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio43: Gpio43<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio44: Gpio44<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio45: Gpio45<Unknown>,
#[cfg(not(feature = "ulp"))]
pub gpio46: Gpio46<Unknown>,
#[cfg(all(esp32s3, not(feature = "ulp")))]
pub gpio47: Gpio47<Unknown>,
#[cfg(all(esp32s3, not(feature = "ulp")))]
pub gpio48: Gpio48<Unknown>,
}
impl Pins {
/// # Safety
///
/// Care should be taken not to instantiate the Pins structure, if it is
/// already instantiated and used elsewhere
pub unsafe fn new() -> Self {
Self {
gpio0: Gpio0::<Unknown>::new(),
gpio1: Gpio1::<Unknown>::new(),
gpio2: Gpio2::<Unknown>::new(),
gpio3: Gpio3::<Unknown>::new(),
gpio4: Gpio4::<Unknown>::new(),
gpio5: Gpio5::<Unknown>::new(),
gpio6: Gpio6::<Unknown>::new(),
gpio7: Gpio7::<Unknown>::new(),
gpio8: Gpio8::<Unknown>::new(),
gpio9: Gpio9::<Unknown>::new(),
gpio10: Gpio10::<Unknown>::new(),
gpio11: Gpio11::<Unknown>::new(),
gpio12: Gpio12::<Unknown>::new(),
gpio13: Gpio13::<Unknown>::new(),
gpio14: Gpio14::<Unknown>::new(),
gpio15: Gpio15::<Unknown>::new(),
gpio16: Gpio16::<Unknown>::new(),
gpio17: Gpio17::<Unknown>::new(),
gpio18: Gpio18::<Unknown>::new(),
gpio19: Gpio19::<Unknown>::new(),
gpio20: Gpio20::<Unknown>::new(),
gpio21: Gpio21::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio26: Gpio26::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio27: Gpio27::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio28: Gpio28::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio29: Gpio29::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio30: Gpio30::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio31: Gpio31::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio32: Gpio32::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio33: Gpio33::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio34: Gpio34::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio35: Gpio35::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio36: Gpio36::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio37: Gpio37::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio38: Gpio38::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio39: Gpio39::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio40: Gpio40::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio41: Gpio41::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio42: Gpio42::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio43: Gpio43::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio44: Gpio44::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio45: Gpio45::<Unknown>::new(),
#[cfg(not(feature = "ulp"))]
gpio46: Gpio46::<Unknown>::new(),
#[cfg(all(esp32s3, not(feature = "ulp")))]
gpio47: Gpio47::<Unknown>::new(),
#[cfg(all(esp32s3, not(feature = "ulp")))]
gpio48: Gpio48::<Unknown>::new(),
}
}
}
}
| mod chip {
use core::marker::PhantomData;
use esp_idf_sys::*;
use super::*;
// NOTE: Gpio12 - Gpio17 are used by SPI0/SPI1 for external PSRAM/SPI Flash and
// are not recommended for other uses
pin!(Gpio0:0, IO, RTC:0, ADC1:0, NODAC:0, NOTOUCH:0);
pin!(Gpio1:1, IO, RTC:1, ADC1:1, NODAC:0, NOTOUCH:0);
pin!(Gpio2:2, IO, RTC:2, ADC1:2, NODAC:0, NOTOUCH:0);
pin!(Gpio3:3, IO, RTC:3, ADC1:3, NODAC:0, NOTOUCH:0);
pin!(Gpio4:4, IO, RTC:4, ADC1:4, NODAC:0, NOTOUCH:0);
pin!(Gpio5:5, IO, RTC:5, ADC2:0, NODAC:0, NOTOUCH:0);
pin!(Gpio6:6, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio7:7, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio8:8, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio9:9, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio10:10, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio11:11, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio12:12, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio13:13, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio14:14, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio15:15, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio16:16, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio17:17, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio18:18, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio19:19, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio20:20, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pin!(Gpio21:21, IO, NORTC:0, NOADC:0, NODAC:0, NOTOUCH:0);
pub struct Pins {
pub gpio0: Gpio0<Unknown>,
pub gpio1: Gpio1<Unknown>,
pub gpio2: Gpio2<Unknown>,
pub gpio3: Gpio3<Unknown>,
pub gpio4: Gpio4<Unknown>,
pub gpio5: Gpio5<Unknown>,
pub gpio6: Gpio6<Unknown>,
pub gpio7: Gpio7<Unknown>,
pub gpio8: Gpio8<Unknown>,
pub gpio9: Gpio9<Unknown>,
pub gpio10: Gpio10<Unknown>,
pub gpio11: Gpio11<Unknown>,
pub gpio12: Gpio12<Unknown>,
pub gpio13: Gpio13<Unknown>,
pub gpio14: Gpio14<Unknown>,
pub gpio15: Gpio15<Unknown>,
pub gpio16: Gpio16<Unknown>,
pub gpio17: Gpio17<Unknown>,
pub gpio18: Gpio18<Unknown>,
pub gpio19: Gpio19<Unknown>,
pub gpio20: Gpio20<Unknown>,
pub gpio21: Gpio21<Unknown>,
}
impl Pins {
/// # Safety
///
/// Care should be taken not to instantiate the Pins structure, if it is
/// already instantiated and used elsewhere
pub unsafe fn new() -> Self {
Self {
gpio0: Gpio0::<Unknown>::new(),
gpio1: Gpio1::<Unknown>::new(),
gpio2: Gpio2::<Unknown>::new(),
gpio3: Gpio3::<Unknown>::new(),
gpio4: Gpio4::<Unknown>::new(),
gpio5: Gpio5::<Unknown>::new(),
gpio6: Gpio6::<Unknown>::new(),
gpio7: Gpio7::<Unknown>::new(),
gpio8: Gpio8::<Unknown>::new(),
gpio9: Gpio9::<Unknown>::new(),
gpio10: Gpio10::<Unknown>::new(),
gpio11: Gpio11::<Unknown>::new(),
gpio12: Gpio12::<Unknown>::new(),
gpio13: Gpio13::<Unknown>::new(),
gpio14: Gpio14::<Unknown>::new(),
gpio15: Gpio15::<Unknown>::new(),
gpio16: Gpio16::<Unknown>::new(),
gpio17: Gpio17::<Unknown>::new(),
gpio18: Gpio18::<Unknown>::new(),
gpio19: Gpio19::<Unknown>::new(),
gpio20: Gpio20::<Unknown>::new(),
gpio21: Gpio21::<Unknown>::new(),
}
}
}
} | #[cfg(esp32c3)]
#[cfg(not(feature = "ulp"))] |
task_htmlmin.js | angular.module('task_htmlmin').run(['$templateCache', function($templateCache) { |
$templateCache.put('test/fixtures/two.html',
"<h2>Two</h2><textarea readonly>We are two.</textarea>"
);
}]); | 'use strict'; |
http_server.rs | use std::{convert::Infallible, time::Duration};
use futures::future;
use http::{Response, StatusCode};
use hyper::{
server::{conn::AddrIncoming, Builder},
Body,
};
use serde::Serialize;
use tokio::sync::oneshot;
use tower::builder::ServiceBuilder;
use tracing::{info, trace};
use warp::{Filter, Rejection};
use casper_types::ProtocolVersion;
use super::{
rpcs::{self, RpcWithOptionalParamsExt, RpcWithParamsExt, RpcWithoutParamsExt, RPC_API_PATH},
ReactorEventT,
};
use crate::effect::EffectBuilder;
// This is a workaround for not being able to create a `warp_json_rpc::Response` without a
// `warp_json_rpc::Builder`.
fn new_error_response(error: warp_json_rpc::Error) -> Response<Body> {
#[derive(Serialize)]
struct JsonRpcErrorResponse {
jsonrpc: String,
id: Option<()>,
error: warp_json_rpc::Error,
}
let json_response = JsonRpcErrorResponse {
jsonrpc: "2.0".to_string(),
id: None,
error,
};
let body = Body::from(serde_json::to_vec(&json_response).unwrap());
Response::builder()
.status(StatusCode::OK)
.header("Content-Type", "application/json")
.body(body)
.unwrap()
}
/// Run the JSON-RPC server.
pub(super) async fn | <REv: ReactorEventT>(
builder: Builder<AddrIncoming>,
effect_builder: EffectBuilder<REv>,
api_version: ProtocolVersion,
qps_limit: u64,
) {
// RPC filters.
let rpc_put_deploy = rpcs::account::PutDeploy::create_filter(effect_builder, api_version);
let rpc_get_block = rpcs::chain::GetBlock::create_filter(effect_builder, api_version);
let rpc_get_block_transfers =
rpcs::chain::GetBlockTransfers::create_filter(effect_builder, api_version);
let rpc_get_state_root_hash =
rpcs::chain::GetStateRootHash::create_filter(effect_builder, api_version);
let rpc_get_item = rpcs::state::GetItem::create_filter(effect_builder, api_version);
let rpc_get_balance = rpcs::state::GetBalance::create_filter(effect_builder, api_version);
let rpc_get_deploy = rpcs::info::GetDeploy::create_filter(effect_builder, api_version);
let rpc_get_peers = rpcs::info::GetPeers::create_filter(effect_builder, api_version);
let rpc_get_status = rpcs::info::GetStatus::create_filter(effect_builder, api_version);
let rpc_get_era_info =
rpcs::chain::GetEraInfoBySwitchBlock::create_filter(effect_builder, api_version);
let rpc_get_auction_info =
rpcs::state::GetAuctionInfo::create_filter(effect_builder, api_version);
let rpc_get_rpcs = rpcs::docs::ListRpcs::create_filter(effect_builder, api_version);
// Catch requests where the method is not one we handle.
let unknown_method = warp::path(RPC_API_PATH)
.and(warp_json_rpc::filters::json_rpc())
.and_then(move |response_builder: warp_json_rpc::Builder| async move {
response_builder
.error(warp_json_rpc::Error::METHOD_NOT_FOUND)
.map_err(|_| warp::reject())
});
// Catch requests which don't parse as JSON.
let parse_failure = warp::path(RPC_API_PATH).and_then(move || async move {
let error_response = new_error_response(warp_json_rpc::Error::PARSE_ERROR);
Ok::<_, Rejection>(error_response)
});
// TODO - we can't catch cases where we should return `warp_json_rpc::Error::INVALID_REQUEST`
// (i.e. where the request is JSON, but not valid JSON-RPC). This will require an
// update to or move away from warp_json_rpc.
let service = warp_json_rpc::service(
rpc_put_deploy
.or(rpc_get_block)
.or(rpc_get_block_transfers)
.or(rpc_get_state_root_hash)
.or(rpc_get_item)
.or(rpc_get_balance)
.or(rpc_get_deploy)
.or(rpc_get_peers)
.or(rpc_get_status)
.or(rpc_get_era_info)
.or(rpc_get_auction_info)
.or(rpc_get_rpcs)
.or(unknown_method)
.or(parse_failure),
);
// Start the server, passing a oneshot receiver to allow the server to be shut down gracefully.
let make_svc =
hyper::service::make_service_fn(move |_| future::ok::<_, Infallible>(service.clone()));
let make_svc = ServiceBuilder::new()
.rate_limit(qps_limit, Duration::from_secs(1))
.service(make_svc);
let (shutdown_sender, shutdown_receiver) = oneshot::channel::<()>();
let server = builder.serve(make_svc);
info!(address = %server.local_addr(), "started JSON-RPC server");
let server_with_shutdown = server.with_graceful_shutdown(async {
shutdown_receiver.await.ok();
});
let server_joiner = tokio::spawn(server_with_shutdown);
let _ = server_joiner.await;
// Shut down the server.
let _ = shutdown_sender.send(());
trace!("JSON-RPC server stopped");
}
| run |
regions-bounds.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that explicit region bounds are allowed on the various
// nominal types (but not on other types) and that they are type
// checked.
struct an_enum<'a>(&'a int);
struct a_class<'a> { x:&'a int }
fn a_fn1<'a,'b>(e: an_enum<'a>) -> an_enum<'b> {
return e; //~ ERROR mismatched types: expected `an_enum<'b>` but found `an_enum<'a>`
//~^ ERROR cannot infer an appropriate lifetime
}
fn a_fn3<'a,'b>(e: a_class<'a>) -> a_class<'b> {
return e; //~ ERROR mismatched types: expected `a_class<'b>` but found `a_class<'a>`
//~^ ERROR cannot infer an appropriate lifetime
}
fn main() | { } |
|
pkg.py | # -*- coding: utf-8 -*-
'''
tests for pkg state
'''
# Import Salt Testing libs
from salttesting import skipIf
from salttesting.helpers import (
destructiveTest,
ensure_in_syspath,
requires_system_grains,
requires_salt_modules
)
ensure_in_syspath('../../')
# Import python libs
import os
import time
# Import salt libs
import integration
import salt.utils
_PKG_TARGETS = {
'Arch': ['python2-django', 'libpng'],
'Debian': ['python-plist', 'apg'],
'RedHat': ['xz-devel', 'zsh-html'],
'FreeBSD': ['aalib', 'pth'],
'Suse': ['aalib', 'python-pssh']
}
_PKG_TARGETS_32 = {
'CentOS': 'xz-devel.i686'
}
# Test packages with dot in pkg name
# (https://github.com/saltstack/salt/issues/8614)
_PKG_TARGETS_DOT = {
'RedHat': {'5': 'python-migrate0.5',
'6': 'tomcat6-el-2.1-api',
'7': 'tomcat-el-2.2-api'}
}
@requires_salt_modules('pkg.version', 'pkg.latest_version')
class PkgTest(integration.ModuleCase,
integration.SaltReturnAssertsMixIn):
'''
pkg.installed state tests
'''
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
@requires_system_grains
def test_pkg_001_installed(self, grains=None):
'''
This is a destructive test as it installs and then removes a package
'''
os_family = grains.get('os_family', '')
pkg_targets = _PKG_TARGETS.get(os_family, [])
# Make sure that we have targets that match the os_family. If this
# fails then the _PKG_TARGETS dict above needs to have an entry added,
# with two packages that are not installed before these tests are run
self.assertTrue(pkg_targets)
target = pkg_targets[0]
version = self.run_function('pkg.version', [target])
# If this assert fails, we need to find new targets, this test needs to
# be able to test successful installation of packages, so this package
# needs to not be installed before we run the states below
self.assertFalse(version)
ret = self.run_state('pkg.installed', name=target)
self.assertSaltTrueReturn(ret)
ret = self.run_state('pkg.removed', name=target)
self.assertSaltTrueReturn(ret)
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
@requires_system_grains
def test_pkg_002_installed_with_version(self, grains=None):
'''
This is a destructive test as it installs and then removes a package
'''
os_family = grains.get('os_family', '')
pkg_targets = _PKG_TARGETS.get(os_family, [])
# Don't perform this test on FreeBSD since version specification is not
# supported.
if os_family == 'FreeBSD':
return
# Make sure that we have targets that match the os_family. If this
# fails then the _PKG_TARGETS dict above needs to have an entry added,
# with two packages that are not installed before these tests are run
self.assertTrue(pkg_targets)
if os_family == 'Arch':
for idx in xrange(13):
if idx == 12:
raise Exception('Package database locked after 60 seconds, '
'bailing out')
if not os.path.isfile('/var/lib/pacman/db.lck'):
break
time.sleep(5)
target = pkg_targets[0]
version = self.run_function('pkg.latest_version', [target])
# If this assert fails, we need to find new targets, this test needs to
# be able to test successful installation of packages, so this package
# needs to not be installed before we run the states below
self.assertTrue(version)
ret = self.run_state('pkg.installed', name=target, version=version)
self.assertSaltTrueReturn(ret)
ret = self.run_state('pkg.removed', name=target)
self.assertSaltTrueReturn(ret)
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
@requires_system_grains
def test_pkg_003_installed_multipkg(self, grains=None):
'''
This is a destructive test as it installs and then removes two packages
'''
os_family = grains.get('os_family', '')
pkg_targets = _PKG_TARGETS.get(os_family, [])
# Make sure that we have targets that match the os_family. If this
# fails then the _PKG_TARGETS dict above needs to have an entry added,
# with two packages that are not installed before these tests are run
self.assertTrue(pkg_targets)
version = self.run_function('pkg.version', pkg_targets)
# If this assert fails, we need to find new targets, this test needs to
# be able to test successful installation of packages, so these
# packages need to not be installed before we run the states below
self.assertFalse(any(version.values()))
ret = self.run_state('pkg.installed', name=None, pkgs=pkg_targets)
self.assertSaltTrueReturn(ret)
ret = self.run_state('pkg.removed', name=None, pkgs=pkg_targets)
self.assertSaltTrueReturn(ret)
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
@requires_system_grains
def test_pkg_004_installed_multipkg_with_version(self, grains=None):
'''
This is a destructive test as it installs and then removes two packages
'''
os_family = grains.get('os_family', '')
pkg_targets = _PKG_TARGETS.get(os_family, [])
# Don't perform this test on FreeBSD since version specification is not
# supported.
if os_family == 'FreeBSD':
return
# Make sure that we have targets that match the os_family. If this
# fails then the _PKG_TARGETS dict above needs to have an entry added,
# with two packages that are not installed before these tests are run
self.assertTrue(pkg_targets)
if os_family == 'Arch':
for idx in xrange(13):
if idx == 12:
raise Exception('Package database locked after 60 seconds, '
'bailing out')
if not os.path.isfile('/var/lib/pacman/db.lck'):
break
time.sleep(5)
version = self.run_function('pkg.latest_version', [pkg_targets[0]])
# If this assert fails, we need to find new targets, this test needs to
# be able to test successful installation of packages, so these
# packages need to not be installed before we run the states below
self.assertTrue(version)
pkgs = [{pkg_targets[0]: version}, pkg_targets[1]]
ret = self.run_state('pkg.installed', name=None, pkgs=pkgs)
self.assertSaltTrueReturn(ret)
ret = self.run_state('pkg.removed', name=None, pkgs=pkg_targets)
self.assertSaltTrueReturn(ret)
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
@requires_system_grains
def test_pkg_005_installed_32bit(self, grains=None):
'''
This is a destructive test as it installs and then removes a package
'''
os_name = grains.get('os', '')
target = _PKG_TARGETS_32.get(os_name, '')
# _PKG_TARGETS_32 is only populated for platforms for which Salt has to
# munge package names for 32-bit-on-x86_64 (Currently only Ubuntu and
# RHEL-based). Don't actually perform this test on other platforms.
if target:
# CentOS 5 has .i386 arch designation for 32-bit pkgs
if os_name == 'CentOS' \
and grains['osrelease'].startswith('5.'):
target = target.replace('.i686', '.i386')
version = self.run_function('pkg.version', [target])
# If this assert fails, we need to find a new target. This test
# needs to be able to test successful installation of packages, so
# the target needs to not be installed before we run the states
# below
self.assertFalse(version)
ret = self.run_state('pkg.installed', name=target)
self.assertSaltTrueReturn(ret)
ret = self.run_state('pkg.removed', name=target)
self.assertSaltTrueReturn(ret)
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
@requires_system_grains
def | (self, grains=None):
'''
This is a destructive test as it installs and then removes a package
'''
os_name = grains.get('os', '')
target = _PKG_TARGETS_32.get(os_name, '')
# _PKG_TARGETS_32 is only populated for platforms for which Salt has to
# munge package names for 32-bit-on-x86_64 (Currently only Ubuntu and
# RHEL-based). Don't actually perform this test on other platforms.
if target:
if grains.get('os_family', '') == 'Arch':
self._wait_for_pkgdb_unlock()
# CentOS 5 has .i386 arch designation for 32-bit pkgs
if os_name == 'CentOS' \
and grains['osrelease'].startswith('5.'):
target = target.replace('.i686', '.i386')
version = self.run_function('pkg.latest_version', [target])
# If this assert fails, we need to find a new target. This test
# needs to be able to test successful installation of the package, so
# the target needs to not be installed before we run the states
# below
self.assertTrue(version)
ret = self.run_state('pkg.installed', name=target, version=version)
self.assertSaltTrueReturn(ret)
ret = self.run_state('pkg.removed', name=target)
self.assertSaltTrueReturn(ret)
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
@requires_system_grains
def test_pkg_with_dot_in_pkgname(self, grains=None):
'''
This tests for the regression found in the following issue:
https://github.com/saltstack/salt/issues/8614
This is a destructive test as it installs a package
'''
os_family = grains.get('os_family', '')
os_version = grains.get('osmajorrelease', [''])[0]
if os_family in _PKG_TARGETS_DOT:
target = _PKG_TARGETS_DOT.get(os_family, '').get(os_version, '')
else:
target = None
if target:
ret = self.run_state('pkg.installed', name=target)
self.assertSaltTrueReturn(ret)
if __name__ == '__main__':
from integration import run_tests
run_tests(PkgTest)
| test_pkg_006_installed_32bit_with_version |
version.go | // Code generated ./scripts/generate_version.bash DO NOT EDIT.
| package main
var version = "v2.0.2-5-g18307d0" |
|
torch_core.py | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/00_torch_core.ipynb (unless otherwise specified).
__all__ = ['progress_bar', 'master_bar', 'subplots', 'show_image', 'show_titled_image', 'show_images', 'ArrayBase',
'ArrayImageBase', 'ArrayImage', 'ArrayImageBW', 'ArrayMask', 'tensor', 'set_seed', 'get_random_states',
'set_random_states', 'no_random', 'unsqueeze', 'unsqueeze_', 'apply', 'maybe_gather', 'to_detach', 'to_half',
'to_float', 'default_device', 'to_device', 'to_cpu', 'to_np', 'to_concat', 'TensorBase', 'TensorImageBase',
'TensorImage', 'TensorImageBW', 'TensorMask', 'TensorCategory', 'TensorMultiCategory', 'TitledTensorScalar',
'concat', 'Chunks', 'show_title', 'ShowTitle', 'TitledInt', 'TitledFloat', 'TitledStr', 'TitledTuple',
'get_empty_df', 'display_df', 'get_first', 'one_param', 'item_find', 'find_device', 'find_bs', 'np_func',
'Module', 'get_model', 'one_hot', 'one_hot_decode', 'params', 'trainable_params', 'norm_types',
'norm_bias_params', 'batch_to_samples', 'logit', 'num_distrib', 'rank_distrib', 'distrib_barrier',
'base_doc', 'doc', 'nested_reorder', 'make_cross_image', 'show_image_batch', 'requires_grad', 'init_default',
'cond_init', 'apply_leaf', 'apply_init', 'script_use_ctx', 'script_save_ctx', 'script_fwd', 'script_bwd',
'grad_module', 'flatten_check']
# Cell
from .imports import *
from .torch_imports import *
# Cell
#nbdev_comment _all_ = ['progress_bar','master_bar']
# Cell
if torch.cuda.is_available():
if torch.cuda.current_device()==0:
def_gpu = int(os.environ.get('DEFAULT_GPU') or 0)
if torch.cuda.device_count()>=def_gpu: torch.cuda.set_device(def_gpu)
torch.backends.cudnn.benchmark = True
# Cell
@delegates(plt.subplots, keep=True)
def | (nrows=1, ncols=1, figsize=None, imsize=3,suptitle=None, **kwargs):
if figsize is None:
h=nrows*imsize if suptitle is None or imsize>2 else nrows*imsize+0.6 #https://github.com/matplotlib/matplotlib/issues/5355
figsize=(ncols*imsize, h)
fig,ax = plt.subplots(nrows, ncols, figsize=figsize, **kwargs)
if suptitle is not None: fig.suptitle(suptitle)
if nrows*ncols==1: ax = array([ax])
return fig,ax
# Cell
def _fig_bounds(x):
r = x//32
return min(5, max(1,r))
# Cell
@delegates(plt.Axes.imshow, keep=True, but=['shape', 'imlim'])
def show_image(im, ax=None, figsize=None, title=None, ctx=None, **kwargs):
"Show a PIL or PyTorch image on `ax`."
# Handle pytorch axis order
if hasattrs(im, ('data','cpu','permute')):
im = im.data.cpu()
if im.shape[0]<5: im=im.permute(1,2,0)
elif not isinstance(im,np.ndarray): im=array(im)
# Handle 1-channel images
if im.shape[-1]==1: im=im[...,0]
ax = ifnone(ax,ctx)
if figsize is None: figsize = (_fig_bounds(im.shape[0]), _fig_bounds(im.shape[1]))
if ax is None: _,ax = plt.subplots(figsize=figsize)
ax.imshow(im, **kwargs)
if title is not None: ax.set_title(title)
ax.axis('off')
return ax
# Cell
@delegates(show_image, keep=True)
def show_titled_image(o, **kwargs):
"Call `show_image` destructuring `o` to `(img,title)`"
show_image(o[0], title=str(o[1]), **kwargs)
# Cell
@delegates(subplots)
def show_images(ims, nrows=1, ncols=None, titles=None, **kwargs):
"Show all images `ims` as subplots with `rows` using `titles`."
if ncols is None: ncols = int(math.ceil(len(ims)/nrows))
if titles is None: titles = [None]*len(ims)
axs = subplots(nrows, ncols, **kwargs)[1].flat
for im,t,ax in zip(ims, titles, axs): show_image(im, ax=ax, title=t)
# Cell
class ArrayBase(ndarray):
"An `ndarray` that can modify casting behavior"
@classmethod
def _before_cast(cls, x): return x if isinstance(x,ndarray) else array(x)
# Cell
class ArrayImageBase(ArrayBase):
"Base class for arrays representing images"
_show_args = {'cmap':'viridis'}
def show(self, ctx=None, **kwargs):
return show_image(self, ctx=ctx, **{**self._show_args, **kwargs})
# Cell
class ArrayImage(ArrayImageBase):
"An array representing an image"
pass
# Cell
class ArrayImageBW(ArrayImage):
"An array representing an image"
_show_args = {'cmap':'Greys'}
# Cell
class ArrayMask(ArrayImageBase):
"An array representing an image mask"
_show_args = {'alpha':0.5, 'cmap':'tab20', 'interpolation':'nearest'}
# Cell
@patch
def __array_eq__(self:Tensor,b):
return torch.equal(self,b) if self.dim() else self==b
# Cell
def _array2tensor(x):
if x.dtype==np.uint16: x = x.astype(np.float32)
return torch.from_numpy(x)
# Cell
@use_kwargs_dict(dtype=None, device=None, requires_grad=False, pin_memory=False)
def tensor(x, *rest, **kwargs):
"Like `torch.as_tensor`, but handle lists too, and can pass multiple vector elements directly."
if len(rest): x = (x,)+rest
# There was a Pytorch bug in dataloader using num_workers>0. Haven't confirmed if fixed
# if isinstance(x, (tuple,list)) and len(x)==0: return tensor(0)
res = (x if isinstance(x, Tensor)
else torch.tensor(x, **kwargs) if isinstance(x, (tuple,list))
else _array2tensor(x) if isinstance(x, ndarray)
else as_tensor(x.values, **kwargs) if isinstance(x, (pd.Series, pd.DataFrame))
else as_tensor(x, **kwargs) if hasattr(x, '__array__') or is_iter(x)
else _array2tensor(array(x), **kwargs))
if res.dtype is torch.float64: return res.float()
return res
# Cell
def set_seed(s, reproducible=False):
"Set random seed for `random`, `torch`, and `numpy` (where available)"
try: torch.manual_seed(s)
except NameError: pass
try: torch.cuda.manual_seed_all(s)
except NameError: pass
try: np.random.seed(s%(2**32-1))
except NameError: pass
random.seed(s)
if reproducible:
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
# Cell
def get_random_states():
"Gets states for `random`, `torch`, and `numpy` random number generators"
return {'random_state':random.getstate(),
'numpy_state':np.random.get_state(),
'torch_state':torch.get_rng_state(),
'torch_cuda_state':torch.cuda.get_rng_state_all(),
'torch_deterministic':torch.backends.cudnn.deterministic,
'torch_benchmark':torch.backends.cudnn.benchmark}
# Cell
def set_random_states(random_state,numpy_state,torch_state,torch_cuda_state,torch_deterministic,torch_benchmark):
"Set states for `random`, `torch`, and `numpy` random number generators"
random.setstate(random_state)
np.random.set_state(numpy_state)
torch.set_rng_state(torch_state)
torch.cuda.set_rng_state_all(torch_cuda_state)
torch.backends.cudnn.deterministic=torch_deterministic
torch.backends.cudnn.benchmark=torch_benchmark
# Cell
@contextmanager
def no_random(seed=42,reproducible=True):
"Stores and retrieves state of random number generators. Sets random seed for `random`, `torch`, and `numpy`."
states = get_random_states()
set_seed(seed,reproducible=reproducible)
try:
yield #we are managing global variables
finally:
set_random_states(**states)
# Cell
def unsqueeze(x, dim=-1, n=1):
"Same as `torch.unsqueeze` but can add `n` dims"
for _ in range(n): x = x.unsqueeze(dim)
return x
# Cell
def unsqueeze_(x, dim=-1, n=1):
"Same as `torch.unsqueeze_` but can add `n` dims"
for _ in range(n): x.unsqueeze_(dim)
return x
# Cell
def _fa_rebuild_tensor (cls, *args, **kwargs): return cls(torch._utils._rebuild_tensor_v2(*args, **kwargs))
def _fa_rebuild_qtensor(cls, *args, **kwargs): return cls(torch._utils._rebuild_qtensor (*args, **kwargs))
# Cell
def apply(func, x, *args, **kwargs):
"Apply `func` recursively to `x`, passing on args"
if is_listy(x): return type(x)([apply(func, o, *args, **kwargs) for o in x])
if isinstance(x,dict): return {k: apply(func, v, *args, **kwargs) for k,v in x.items()}
res = func(x, *args, **kwargs)
return res if x is None else retain_type(res, x)
# Cell
def maybe_gather(x, axis=0):
"Gather copies of `x` on `axis` (if training is distributed)"
if num_distrib()<=1: return x
ndim = x.ndim
res = [x.new_zeros(*x.shape if ndim > 0 else (1,)) for _ in range(num_distrib())]
torch.distributed.all_gather(res, x.contiguous() if ndim > 0 else x[None])
return torch.cat(res, dim=axis) if ndim > 0 else torch.cat(res, dim=axis).mean()
# Cell
def to_detach(b, cpu=True, gather=True):
"Recursively detach lists of tensors in `b `; put them on the CPU if `cpu=True`."
def _inner(x, cpu=True, gather=True):
if not isinstance(x,Tensor): return x
x = x.detach()
if gather: x = maybe_gather(x)
return x.cpu() if cpu else x
return apply(_inner, b, cpu=cpu, gather=gather)
# Cell
def to_half(b):
"Recursively map lists of tensors in `b ` to FP16."
return apply(lambda x: x.half() if torch.is_floating_point(x) else x, b)
# Cell
def to_float(b):
"Recursively map lists of int tensors in `b ` to float."
return apply(lambda x: x.float() if torch.is_floating_point(x) else x, b)
# Cell
# None: True if available; True: error if not available; False: use CPU
defaults.use_cuda = None
# Cell
def default_device(use_cuda=-1):
"Return or set default device; `use_cuda`: None - CUDA if available; True - error if not available; False - CPU"
if use_cuda != -1: defaults.use_cuda=use_cuda
use = defaults.use_cuda or (torch.cuda.is_available() and defaults.use_cuda is None)
assert torch.cuda.is_available() or not use
return torch.device(torch.cuda.current_device()) if use else torch.device('cpu')
# Cell
def to_device(b, device=None):
"Recursively put `b` on `device`."
if defaults.use_cuda==False: device='cpu'
elif device is None: device=default_device()
def _inner(o): return o.to(device, non_blocking=True) if isinstance(o,Tensor) else o.to_device(device) if hasattr(o, "to_device") else o
return apply(_inner, b)
# Cell
def to_cpu(b):
"Recursively map lists of tensors in `b ` to the cpu."
return to_device(b,'cpu')
# Cell
def to_np(x):
"Convert a tensor to a numpy array."
return apply(lambda o: o.data.cpu().numpy(), x)
# Cell
def to_concat(xs, dim=0):
"Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
if not xs: return xs
if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
if isinstance(xs[0],dict): return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs[0].keys()}
#We may receive xs that are not concatenable (inputs of a text classifier for instance),
# in this case we return a big list
try: return retain_type(torch.cat(xs, dim=dim), xs[0])
except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
for i in range_of(o_)) for o_ in xs], L())
# Cell
@patch
def set_meta(self:Tensor, x, as_copy=False):
"Set all metadata in `__dict__`"
if not hasattr(x,'__dict__'): return
self.__dict__ = deepcopy(x.__dict__) if as_copy else x.__dict__
# Cell
if not hasattr(torch,'as_subclass'): torch.as_subclass = torch.Tensor.as_subclass
# Cell
@patch
def as_subclass(self:Tensor, typ):
"Cast to `typ` and include `__dict__` and meta"
return retain_meta(self, torch.as_subclass(self, typ))
# Cell
def _convert(ret, cls):
if isinstance(ret, torch.Tensor): ret = ret.as_subclass(cls)
if isinstance(ret, (tuple, list)): ret = type(ret)(_convert(r, cls) for r in ret)
return ret
# Cell
class TensorBase(Tensor):
def __new__(cls, x, **kwargs):
res = cast(tensor(x), cls)
for k,v in kwargs.items(): setattr(res, k, v)
return res
@classmethod
def _before_cast(cls, x): return tensor(x)
def __repr__(self): return re.sub('tensor', self.__class__.__name__, super().__repr__())
def __reduce_ex__(self,proto):
torch.utils.hooks.warn_if_has_hooks(self)
args = (type(self), self.storage(), self.storage_offset(), tuple(self.size()), self.stride())
if self.is_quantized: args = args + (self.q_scale(), self.q_zero_point())
f = _fa_rebuild_qtensor if self.is_quantized else _fa_rebuild_tensor
return (f, args + (self.requires_grad, OrderedDict()))
def __torch_function__(self, func, types, args=(), kwargs=None):
with torch._C.DisableTorchFunction(): ret = _convert(func(*args, **(kwargs or {})), self.__class__)
if isinstance(ret, TensorBase): ret.set_meta(self, as_copy=True)
return ret
def new_tensor(self, size, dtype=None, device=None, requires_grad=False):
cls = type(self)
return self.as_subclass(Tensor).new_tensor(size, dtype=dtype, device=device, requires_grad=requires_grad).as_subclass(cls)
def new_ones(self, data, dtype=None, device=None, requires_grad=False):
cls = type(self)
return self.as_subclass(Tensor).new_ones(data, dtype=dtype, device=device, requires_grad=requires_grad).as_subclass(cls)
def new(self, x):
cls = type(self)
return self.as_subclass(Tensor).new(x).as_subclass(cls)
# Cell
class TensorImageBase(TensorBase):
_show_args = ArrayImageBase._show_args
def show(self, ctx=None, **kwargs):
return show_image(self, ctx=ctx, **{**self._show_args, **kwargs})
# Cell
class TensorImage(TensorImageBase): pass
# Cell
class TensorImageBW(TensorImage): _show_args = ArrayImageBW._show_args
# Cell
class TensorMask(TensorImageBase):
_show_args = ArrayMask._show_args
def show(self, ctx=None, **kwargs):
codes = getattr(self, 'codes', None)
if codes is not None: kwargs = merge({'vmin': 1, 'vmax': len(codes)}, kwargs)
return super().show(ctx=ctx, **kwargs)
# Cell
class TensorCategory(TensorBase): pass
# Cell
class TensorMultiCategory(TensorCategory): pass
# Cell
class TitledTensorScalar(TensorBase):
"A tensor containing a scalar that has a `show` method"
def show(self, **kwargs): show_title(self.item(), **kwargs)
# Cell
@patch
def tensored(self:L):
"`mapped(tensor)`"
return self.map(tensor)
@patch
def stack(self:L, dim=0):
"Same as `torch.stack`"
return torch.stack(list(self.tensored()), dim=dim)
@patch
def cat (self:L, dim=0):
"Same as `torch.cat`"
return torch.cat (list(self.tensored()), dim=dim)
# Cell
def concat(*ls):
"Concatenate tensors, arrays, lists, or tuples"
if not len(ls): return []
it = ls[0]
if isinstance(it,torch.Tensor): res = torch.cat(ls)
elif isinstance(it,ndarray): res = np.concatenate(ls)
else:
res = itertools.chain.from_iterable(map(L,ls))
if isinstance(it,(tuple,list)): res = type(it)(res)
else: res = L(res)
return retain_type(res, it)
# Cell
class Chunks:
"Slice and int indexing into a list of lists"
def __init__(self, chunks, lens=None):
self.chunks = chunks
self.lens = L(map(len,self.chunks) if lens is None else lens)
self.cumlens = np.cumsum(0+self.lens)
self.totlen = self.cumlens[-1]
def __getitem__(self,i):
if isinstance(i,slice): return retain_type(self.getslice(i), old=self.chunks[0])
di,idx = self.doc_idx(i)
return retain_type(self.chunks[di][idx], old=self.chunks[0])
def getslice(self, i):
st_d,st_i = self.doc_idx(ifnone(i.start,0))
en_d,en_i = self.doc_idx(ifnone(i.stop,self.totlen+1))
res = [self.chunks[st_d][st_i:(en_i if st_d==en_d else sys.maxsize)]]
for b in range(st_d+1,en_d): res.append(self.chunks[b])
if st_d!=en_d and en_d<len(self.chunks): res.append(self.chunks[en_d][:en_i])
return concat(*res)
def doc_idx(self, i):
if i<0: i=self.totlen+i # count from end
docidx = np.searchsorted(self.cumlens, i+1)-1
cl = self.cumlens[docidx]
return docidx,i-cl
# Cell
def show_title(o, ax=None, ctx=None, label=None, color='black', **kwargs):
"Set title of `ax` to `o`, or print `o` if `ax` is `None`"
ax = ifnone(ax,ctx)
if ax is None: print(o)
elif hasattr(ax, 'set_title'):
t = ax.title.get_text()
if len(t) > 0: o = t+'\n'+str(o)
ax.set_title(o, color=color)
elif isinstance(ax, pd.Series):
while label in ax: label += '_'
ax = ax.append(pd.Series({label: o}))
return ax
# Cell
class ShowTitle:
"Base class that adds a simple `show`"
_show_args = {'label': 'text'}
def show(self, ctx=None, **kwargs):
"Show self"
return show_title(str(self), ctx=ctx, **merge(self._show_args, kwargs))
class TitledInt(Int, ShowTitle):
_show_args = {'label': 'text'}
def show(self, ctx=None, **kwargs):
"Show self"
return show_title(str(self), ctx=ctx, **merge(self._show_args, kwargs))
class TitledFloat(Float, ShowTitle):
_show_args = {'label': 'text'}
def show(self, ctx=None, **kwargs):
"Show self"
return show_title(str(self), ctx=ctx, **merge(self._show_args, kwargs))
class TitledStr(Str, ShowTitle):
_show_args = {'label': 'text'}
def show(self, ctx=None, **kwargs):
"Show self"
return show_title(str(self), ctx=ctx, **merge(self._show_args, kwargs))
class TitledTuple(fastuple, ShowTitle):
_show_args = {'label': 'text'}
def show(self, ctx=None, **kwargs):
"Show self"
return show_title(str(self), ctx=ctx, **merge(self._show_args, kwargs))
add_docs(TitledInt, "An `int` with `show`"); add_docs(TitledStr, "An `str` with `show`");
add_docs(TitledFloat, "A `float` with `show`"); add_docs(TitledTuple, "A `fastuple` with `show`")
# Cell
@patch
def truncate(self:TitledStr, n):
"Truncate self to `n`"
words = self.split(' ')[:n]
return TitledStr(' '.join(words))
# Cell
if not hasattr(pd.DataFrame,'_old_init'): pd.DataFrame._old_init = pd.DataFrame.__init__
# Cell
@patch
def __init__(self:pd.DataFrame, data=None, index=None, columns=None, dtype=None, copy=False):
if data is not None and isinstance(data, Tensor): data = to_np(data)
self._old_init(data, index=index, columns=columns, dtype=dtype, copy=copy)
# Cell
def get_empty_df(n):
"Return `n` empty rows of a dataframe"
df = pd.DataFrame(index = range(n))
return [df.iloc[i] for i in range(n)]
# Cell
def display_df(df):
"Display `df` in a notebook or defaults to print"
try: from IPython.display import display, HTML
except: return print(df)
display(HTML(df.to_html()))
# Cell
def get_first(c):
"Get the first element of c, even if c is a dataframe"
return getattr(c, 'iloc', c)[0]
# Cell
def one_param(m):
"First parameter in `m`"
return first(m.parameters())
# Cell
def item_find(x, idx=0):
"Recursively takes the `idx`-th element of `x`"
if is_listy(x): return item_find(x[idx])
if isinstance(x,dict):
key = list(x.keys())[idx] if isinstance(idx, int) else idx
return item_find(x[key])
return x
# Cell
def find_device(b):
"Recursively search the device of `b`."
return item_find(b).device
# Cell
def find_bs(b):
"Recursively search the batch size of `b`."
return item_find(b).shape[0]
# Cell
def np_func(f):
"Convert a function taking and returning numpy arrays to one taking and returning tensors"
def _inner(*args, **kwargs):
nargs = [to_np(arg) if isinstance(arg,Tensor) else arg for arg in args]
return tensor(f(*nargs, **kwargs))
functools.update_wrapper(_inner, f)
return _inner
# Cell
class Module(nn.Module, metaclass=PrePostInitMeta):
"Same as `nn.Module`, but no need for subclasses to call `super().__init__`"
def __pre_init__(self, *args, **kwargs): super().__init__()
def __init__(self): pass
# Cell
from torch.nn.parallel import DistributedDataParallel
# Cell
def get_model(model):
"Return the model maybe wrapped inside `model`."
return model.module if isinstance(model, (DistributedDataParallel, nn.DataParallel)) else model
# Cell
def one_hot(x, c):
"One-hot encode `x` with `c` classes."
res = torch.zeros(c, dtype=torch.uint8)
if isinstance(x, Tensor) and x.numel()>0: res[x] = 1.
else: res[list(L(x, use_list=None))] = 1.
return res
# Cell
def one_hot_decode(x, vocab=None):
return L(vocab[i] if vocab else i for i,x_ in enumerate(x) if x_==1)
# Cell
def params(m):
"Return all parameters of `m`"
return [p for p in m.parameters()]
# Cell
def trainable_params(m):
"Return all trainable parameters of `m`"
return [p for p in m.parameters() if p.requires_grad]
# Cell
norm_types = (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d, nn.InstanceNorm1d, nn.InstanceNorm2d, nn.InstanceNorm3d, nn.LayerNorm)
# Cell
def norm_bias_params(m, with_bias=True):
"Return all bias and BatchNorm parameters"
if isinstance(m, norm_types): return L(m.parameters())
res = L(m.children()).map(norm_bias_params, with_bias=with_bias).concat()
if with_bias and getattr(m, 'bias', None) is not None: res.append(m.bias)
return res
# Cell
def batch_to_samples(b, max_n=10):
"'Transposes' a batch to (at most `max_n`) samples"
if isinstance(b, Tensor): return retain_types(list(b[:max_n]), [b])
else:
res = L(b).map(partial(batch_to_samples,max_n=max_n))
return retain_types(res.zip(), [b])
# Cell
@patch
def interp_1d(x:Tensor, xp, fp):
"Same as `np.interp`"
slopes = (fp[1:]-fp[:-1])/(xp[1:]-xp[:-1])
incx = fp[:-1] - (slopes*xp[:-1])
locs = (x[:,None]>=xp[None,:]).long().sum(1)-1
locs = locs.clamp(0,len(slopes)-1)
return slopes[locs]*x + incx[locs]
# Cell
@patch
def pca(x:Tensor, k=2):
"Compute PCA of `x` with `k` dimensions."
x = x-torch.mean(x,0)
U,S,V = torch.svd(x.t())
return torch.mm(x,U[:,:k])
# Cell
def logit(x):
"Logit of `x`, clamped to avoid inf."
x = x.clamp(1e-7, 1-1e-7)
return -(1/x-1).log()
# Cell
def num_distrib():
"Return the number of processes in distributed training (if applicable)."
return int(os.environ.get('WORLD_SIZE', 0))
# Cell
def rank_distrib():
"Return the distributed rank of this process (if applicable)."
return int(os.environ.get('RANK', 0))
# Cell
def distrib_barrier():
"Place a synchronization barrier in distributed training so that ALL sub-processes in the pytorch process group must arrive here before proceeding."
if num_distrib() > 1 and torch.distributed.is_initialized(): torch.distributed.barrier()
# Cell
# Saving arrays requires pytables - optional dependency
try: import tables
except: pass
# Cell
def _comp_filter(lib='lz4',lvl=3): return tables.Filters(complib=f'blosc:{lib}', complevel=lvl)
# Cell
@patch
def save_array(p:Path, o, complib='lz4', lvl=3):
"Save numpy array to a compressed `pytables` file, using compression level `lvl`"
if isinstance(o,Tensor): o = to_np(o)
with tables.open_file(p, mode='w', filters=_comp_filter(lib=complib,lvl=lvl)) as f: f.create_carray('/', 'data', obj=o)
# Cell
@patch
def load_array(p:Path):
"Save numpy array to a `pytables` file"
with tables.open_file(p, 'r') as f: return f.root.data.read()
# Cell
def base_doc(elt):
"Print a base documentation of `elt`"
name = getattr(elt, '__qualname__', getattr(elt, '__name__', ''))
print(f'{name}{inspect.signature(elt)}\n{inspect.getdoc(elt)}\n')
print('To get a prettier result with hyperlinks to source code and documentation, install nbdev: pip install nbdev')
# Cell
def doc(elt):
"Try to use doc form nbdev and fall back to `base_doc`"
try:
from nbdev.showdoc import doc
doc(elt)
except: base_doc(elt)
# Cell
def nested_reorder(t, idxs):
"Reorder all tensors in `t` using `idxs`"
if isinstance(t, (Tensor,L)): return t[idxs]
elif is_listy(t): return type(t)(nested_reorder(t_, idxs) for t_ in t)
if t is None: return t
raise TypeError(f"Expected tensor, tuple, list or L but got {type(t)}")
# Cell
def make_cross_image(bw=True):
"Create a tensor containing a cross image, either `bw` (True) or color"
if bw:
im = torch.zeros(5,5)
im[2,:] = 1.
im[:,2] = 1.
else:
im = torch.zeros(3,5,5)
im[0,2,:] = 1.
im[1,:,2] = 1.
return im
# Cell
def show_image_batch(b, show=show_titled_image, items=9, cols=3, figsize=None, **kwargs):
"Display batch `b` in a grid of size `items` with `cols` width"
if items<cols: cols=items
rows = (items+cols-1) // cols
if figsize is None: figsize = (cols*3, rows*3)
fig,axs = plt.subplots(rows, cols, figsize=figsize)
for *o,ax in zip(*to_cpu(b), axs.flatten()): show(o, ax=ax, **kwargs)
# Cell
def requires_grad(m):
"Check if the first parameter of `m` requires grad or not"
ps = list(m.parameters())
return ps[0].requires_grad if len(ps)>0 else False
# Cell
def init_default(m, func=nn.init.kaiming_normal_):
"Initialize `m` weights with `func` and set `bias` to 0."
if func:
if hasattr(m, 'weight'): func(m.weight)
if hasattr(m, 'bias') and hasattr(m.bias, 'data'): m.bias.data.fill_(0.)
return m
# Cell
def cond_init(m, func):
"Apply `init_default` to `m` unless it's a batchnorm module"
if (not isinstance(m, norm_types)) and requires_grad(m): init_default(m, func)
# Cell
def apply_leaf(m, f):
"Apply `f` to children of `m`."
c = m.children()
if isinstance(m, nn.Module): f(m)
for l in c: apply_leaf(l,f)
# Cell
def apply_init(m, func=nn.init.kaiming_normal_):
"Initialize all non-batchnorm layers of `m` with `func`."
apply_leaf(m, partial(cond_init, func=func))
# Cell
def script_use_ctx(f):
"Decorator: create jit script and pass everything in `ctx.saved_variables to `f`, after `*args`"
sf = torch.jit.script(f)
def _f(ctx, *args, **kwargs): return sf(*args, *ctx.saved_variables, **kwargs)
return update_wrapper(_f,f)
# Cell
def script_save_ctx(static, *argidx):
"Decorator: create jit script and save args with indices `argidx` using `ctx.save_for_backward`"
def _dec(f):
sf = torch.jit.script(f)
def _f(ctx, *args, **kwargs):
if argidx:
save = [args[o] for o in argidx]
ctx.save_for_backward(*save)
if not argidx: args = [ctx]+args
return sf(*args, **kwargs)
if static: _f = staticmethod(_f)
return update_wrapper(_f,f)
return _dec
# Cell
def script_fwd(*argidx):
"Decorator: create static jit script and save args with indices `argidx` using `ctx.save_for_backward`"
return script_save_ctx(True, *argidx)
# Cell
def script_bwd(f):
"Decorator: create static jit script and pass everything in `ctx.saved_variables to `f`, after `*args`"
return staticmethod(script_use_ctx(f))
# Cell
def grad_module(cls):
"Decorator: convert `cls` into an autograd function"
class _c(nn.Module):
def forward(self, *args, **kwargs): return cls.apply(*args, **kwargs)
return _c
# Comes from 13b_metrics.ipynb, cell
def flatten_check(inp, targ):
"Check that `out` and `targ` have the same number of elements and flatten them."
inp,targ = inp.contiguous().view(-1),targ.contiguous().view(-1)
test_eq(len(inp), len(targ))
return inp,targ | subplots |
parse-cache.ts | import { QueryError } from './interfaces.ts';
import LRUCache from 'https://deno.land/x/[email protected]/mod.ts';
import hash from 'https://deno.land/x/[email protected]/mod.ts';
import { Expr, parse, Statement } from 'https://deno.land/x/[email protected]/mod.ts';
import { errorMessage } from './utils.ts';
| const astCache: LRUCache<any, any> = new LRUCache({
max: 1000,
});
let locationTracking = false;
export function enableStatementLocationTracking() {
locationTracking = true;
astCache.reset();
}
/** Parse an AST from SQL */
export function parseSql(sql: string): Statement[];
export function parseSql(sql: string, entry: 'expr'): Expr;
export function parseSql(sql: string, entry?: string): any {
// when 'entry' is not specified, lets cache parsings
// => better perf on repetitive requests
const key = !entry && hash(sql);
if (!entry) {
const cached = astCache.get(key);
if (cached) {
return cached;
}
}
try {
let ret = parse(sql, {
entry,
locationTracking,
});
// cache result
if (!entry) {
astCache.set(key, ret);
}
return ret;
} catch (e) {
const msg = errorMessage(e);
if (!/Syntax error/.test(msg)) {
throw e;
}
// throw a nice parsing error.
throw new QueryError(`💔 Your query failed to parse.
This is most likely due to a SQL syntax error. However, you might also have hit a bug, or an unimplemented feature of pg-mem.
If this is the case, please file an issue at https://github.com/oguimbal/pg-mem along with a query that reproduces this syntax error.
👉 Failed query:
${sql}
💀 ${msg}`);
}
} | |
lifecycle.go | /*
* MinIO Cloud Storage, (C) 2019 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package lifecycle
import (
"encoding/xml"
"errors"
"io"
"strings"
)
var (
errLifecycleTooManyRules = errors.New("Lifecycle configuration allows a maximum of 1000 rules")
errLifecycleNoRule = errors.New("Lifecycle configuration should have at least one rule")
errLifecycleOverlappingPrefix = errors.New("Lifecycle configuration has rules with overlapping prefix")
)
// Lifecycle - Configuration for bucket lifecycle.
type Lifecycle struct {
XMLName xml.Name `xml:"LifecycleConfiguration"`
Rules []Rule `xml:"Rule"`
}
// IsEmpty - returns whether policy is empty or not.
func (lc Lifecycle) IsEmpty() bool {
return len(lc.Rules) == 0
}
// ParseLifecycleConfig - parses data in given reader to Lifecycle.
func ParseLifecycleConfig(reader io.Reader) (*Lifecycle, error) |
// Validate - validates the lifecycle configuration
func (lc Lifecycle) Validate() error {
// Lifecycle config can't have more than 1000 rules
if len(lc.Rules) > 1000 {
return errLifecycleTooManyRules
}
// Lifecycle config should have at least one rule
if len(lc.Rules) == 0 {
return errLifecycleNoRule
}
// Validate all the rules in the lifecycle config
for _, r := range lc.Rules {
if err := r.Validate(); err != nil {
return err
}
}
// Compare every rule's prefix with every other rule's prefix
for i := range lc.Rules {
if i == len(lc.Rules)-1 {
break
}
// N B Empty prefixes overlap with all prefixes
otherRules := lc.Rules[i+1:]
for _, otherRule := range otherRules {
if strings.HasPrefix(lc.Rules[i].Filter.Prefix, otherRule.Filter.Prefix) ||
strings.HasPrefix(otherRule.Filter.Prefix, lc.Rules[i].Filter.Prefix) {
return errLifecycleOverlappingPrefix
}
}
}
return nil
}
| {
var lc Lifecycle
if err := xml.NewDecoder(reader).Decode(&lc); err != nil {
return nil, err
}
if err := lc.Validate(); err != nil {
return nil, err
}
return &lc, nil
} |
main.dev.ts | /* eslint global-require: off, no-console: off */
/**
* This module executes inside of electron's main process. You can start
* electron renderer process from here and communicate with the other processes
* through IPC.
*
* When running `yarn build` or `yarn build-main`, this file is compiled to
* `./app/main.prod.js` using webpack. This gives us some performance wins.
*/
import path from 'path'
import { app, BrowserWindow, ipcMain, Menu } from 'electron'
import { autoUpdater } from 'electron-updater'
import log from 'electron-log'
import openAboutWindow from 'about-window'
import MenuBuilder from './menu'
const appInfo = require('../package.json')
export default class AppUpdater {
constructor() {
log.transports.file.level = 'info'
autoUpdater.logger = log
autoUpdater.checkForUpdatesAndNotify()
}
}
let mainWindow: BrowserWindow | null = null
if (process.env.NODE_ENV === 'production') {
const sourceMapSupport = require('source-map-support')
sourceMapSupport.install()
}
if (
process.env.NODE_ENV === 'development' ||
process.env.DEBUG_PROD === 'true'
) {
require('electron-debug')()
}
const installExtensions = async () => {
const installer = require('electron-devtools-installer')
const forceDownload = !!process.env.UPGRADE_EXTENSIONS
const extensions = ['REACT_DEVELOPER_TOOLS', 'REDUX_DEVTOOLS']
return Promise.all(
extensions.map(name => installer.default(installer[name], forceDownload))
).catch(console.log)
}
const createWindow = async () => {
if (
process.env.NODE_ENV === 'development' ||
process.env.DEBUG_PROD === 'true'
) {
await installExtensions()
}
mainWindow = new BrowserWindow({
show: false,
width: 520,
height: 360,
webPreferences:
process.env.NODE_ENV === 'development' || process.env.E2E_BUILD === 'true'
? {
nodeIntegration: true,
}
: {
nodeIntegration: true,
preload: path.join(__dirname, 'dist/renderer.prod.js'),
},
})
mainWindow.loadURL(`file://${__dirname}/app.html`)
// @TODO: Use 'ready-to-show' event
// https://github.com/electron/electron/blob/master/docs/api/browser-window.md#using-ready-to-show-event
mainWindow.webContents.on('did-finish-load', () => {
if (!mainWindow) {
throw new Error('"mainWindow" is not defined')
}
if (process.env.START_MINIMIZED) {
mainWindow.minimize()
} else {
mainWindow.show()
mainWindow.focus()
}
})
mainWindow.on('closed', () => {
mainWindow = null
})
if (process.env.NODE_ENV === 'development') {
const menuBuilder = new MenuBuilder(mainWindow)
menuBuilder.buildMenu()
} else {
Menu.setApplicationMenu(null)
}
// Remove this if your app does not use auto updates
// eslint-disable-next-line
new AppUpdater()
}
| */
app.on('window-all-closed', () => {
// Respect the OSX convention of having the application in memory even
// after all windows have been closed
if (process.platform !== 'darwin') {
app.quit()
}
})
app.on('ready', createWindow)
app.on('activate', () => {
// On macOS it's common to re-create a window in the app when the
// dock icon is clicked and there are no other windows open.
if (mainWindow === null) createWindow()
})
// Drag-n-drop listeners
const onOpen = data => {
console.log(data)
}
app.on('open-file', onOpen)
app.on('open-url', onOpen)
ipcMain.on('app-quit', () => {
if (process.platform !== 'darwin') {
app.exit()
}
})
log.info('main1', process.cwd())
log.info('main2', __dirname)
ipcMain.on('open-about', () => {
openAboutWindow({
icon_path: path.join(process.cwd(), 'resources/icons/256x256.png'),
package_json_dir: path.join(__dirname, '/../'),
product_name: appInfo.name,
bug_report_url: 'https://git.newtelco.dev/newtelco/billing-parser-1/issue',
copyright: '2020 Newtelco GmbH',
homepage: 'https://git.newtelco.dev',
description: appInfo.description,
license: appInfo.license,
adjust_window_size: true,
win_options: {
resizable: false,
},
show_close_button: 'Close',
})
}) | /**
* Add event listeners... |
format.go | package version
import (
"strings"
"github.com/anchore/syft/syft/pkg"
)
const (
UnknownFormat Format = iota
SemanticFormat
DebFormat
RpmFormat
PythonFormat
)
type Format int
var formatStr = []string{
"UnknownFormat",
"Semantic",
"Deb",
"RPM",
"Python",
}
var Formats = []Format{
SemanticFormat,
DebFormat,
RpmFormat,
PythonFormat,
}
func ParseFormat(userStr string) Format |
func FormatFromPkgType(t pkg.Type) Format {
var format Format
switch t {
case pkg.DebPkg:
format = DebFormat
case pkg.RpmPkg:
format = RpmFormat
case pkg.BundlerPkg:
format = SemanticFormat
case pkg.EggPkg:
format = PythonFormat
case pkg.WheelPkg:
format = PythonFormat
default:
format = UnknownFormat
}
return format
}
func (f Format) String() string {
if int(f) >= len(formatStr) || f < 0 {
return formatStr[0]
}
return formatStr[f]
}
| {
switch strings.ToLower(userStr) {
case strings.ToLower(SemanticFormat.String()), "semver":
return SemanticFormat
case strings.ToLower(DebFormat.String()), "dpkg":
return DebFormat
case strings.ToLower(RpmFormat.String()), "rpmdb":
return RpmFormat
case strings.ToLower(PythonFormat.String()), "python":
return PythonFormat
}
return UnknownFormat
} |
main.rs | use std::{io, path::PathBuf, process};
use structopt::{clap::Shell, StructOpt};
use nix_fix_rustup::{error, patch_toolchain};
#[derive(Debug, StructOpt)]
pub struct PathConfig {
/// The path of toolchain
#[structopt(parse(from_os_str))]
toolchain_path: PathBuf,
}
#[derive(Debug, StructOpt)]
#[structopt(
name = "fix-nixpkgs-rustup",
about = "A tools for patching rpath and linker for Rust toolchain installed via rustup.rs \
under Nix environment."
)]
pub enum Command {
#[structopt(about = "Shows current version")]
Version,
#[structopt(about = "Shows shell completions")]
Completions { shell: Shell },
#[structopt(about = "Fix Rust toolchain")]
Patch(Box<PathConfig>),
}
impl Command {
#[inline]
pub fn app_name() -> String { Command::clap().get_name().to_string() }
pub fn run(self) -> error::Result<()> {
match self {
Command::Version => {
Command::clap()
.write_version(&mut io::stdout())
.expect("failed to write to stdout");
Ok(())
}
Command::Completions { shell } => {
let app_name = Command::app_name();
Command::clap().gen_completions_to(app_name, shell, &mut io::stdout());
Ok(())
}
Command::Patch(config) => patch_toolchain(config.toolchain_path),
}
}
}
fn main() | {
if let Err(err) = Command::from_args().run() {
eprintln!("{}", err);
process::exit(-87);
}
} |
|
port.go | package udp
import "v2ray.com/core/common/net"
// PickPort returns an unused UDP port of the system.
func PickPort() net.Port {
conn := pickPort()
defer conn.Close()
addr := conn.LocalAddr().(*net.UDPAddr)
return net.Port(addr.Port)
}
func pickPort() *net.UDPConn {
conn, err := net.ListenUDP("udp4", &net.UDPAddr{
IP: net.LocalHostIP.IP(),
Port: 0,
})
if err != nil |
return conn
}
| {
conn = pickPort()
} |
operations.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
AvailabilitySets_Get(#[from] availability_sets::get::Error),
#[error(transparent)]
AvailabilitySets_CreateOrUpdate(#[from] availability_sets::create_or_update::Error),
#[error(transparent)]
AvailabilitySets_Delete(#[from] availability_sets::delete::Error),
#[error(transparent)]
AvailabilitySets_ListBySubscription(#[from] availability_sets::list_by_subscription::Error),
#[error(transparent)]
AvailabilitySets_List(#[from] availability_sets::list::Error),
#[error(transparent)]
AvailabilitySets_ListAvailableSizes(#[from] availability_sets::list_available_sizes::Error),
#[error(transparent)]
VirtualMachineExtensionImages_Get(#[from] virtual_machine_extension_images::get::Error),
#[error(transparent)]
VirtualMachineExtensionImages_ListTypes(#[from] virtual_machine_extension_images::list_types::Error),
#[error(transparent)]
VirtualMachineExtensionImages_ListVersions(#[from] virtual_machine_extension_images::list_versions::Error),
#[error(transparent)]
VirtualMachineExtensions_Get(#[from] virtual_machine_extensions::get::Error),
#[error(transparent)]
VirtualMachineExtensions_CreateOrUpdate(#[from] virtual_machine_extensions::create_or_update::Error),
#[error(transparent)]
VirtualMachineExtensions_Update(#[from] virtual_machine_extensions::update::Error),
#[error(transparent)]
VirtualMachineExtensions_Delete(#[from] virtual_machine_extensions::delete::Error),
#[error(transparent)]
VirtualMachines_GetExtensions(#[from] virtual_machines::get_extensions::Error),
#[error(transparent)]
VirtualMachineImages_Get(#[from] virtual_machine_images::get::Error),
#[error(transparent)]
VirtualMachineImages_List(#[from] virtual_machine_images::list::Error),
#[error(transparent)]
VirtualMachineImages_ListOffers(#[from] virtual_machine_images::list_offers::Error),
#[error(transparent)]
VirtualMachineImages_ListPublishers(#[from] virtual_machine_images::list_publishers::Error),
#[error(transparent)]
VirtualMachineImages_ListSkus(#[from] virtual_machine_images::list_skus::Error),
#[error(transparent)]
Usage_List(#[from] usage::list::Error),
#[error(transparent)]
VirtualMachineSizes_List(#[from] virtual_machine_sizes::list::Error),
#[error(transparent)]
VirtualMachines_Capture(#[from] virtual_machines::capture::Error),
#[error(transparent)]
VirtualMachines_Get(#[from] virtual_machines::get::Error),
#[error(transparent)]
VirtualMachines_CreateOrUpdate(#[from] virtual_machines::create_or_update::Error),
#[error(transparent)]
VirtualMachines_Delete(#[from] virtual_machines::delete::Error),
#[error(transparent)]
VirtualMachines_Deallocate(#[from] virtual_machines::deallocate::Error),
#[error(transparent)]
VirtualMachines_Generalize(#[from] virtual_machines::generalize::Error),
#[error(transparent)]
VirtualMachines_List(#[from] virtual_machines::list::Error),
#[error(transparent)]
VirtualMachines_ListAll(#[from] virtual_machines::list_all::Error),
#[error(transparent)]
VirtualMachines_ListAvailableSizes(#[from] virtual_machines::list_available_sizes::Error),
#[error(transparent)]
VirtualMachines_PowerOff(#[from] virtual_machines::power_off::Error),
#[error(transparent)]
VirtualMachines_Restart(#[from] virtual_machines::restart::Error),
#[error(transparent)]
VirtualMachines_Start(#[from] virtual_machines::start::Error),
#[error(transparent)]
VirtualMachines_Redeploy(#[from] virtual_machines::redeploy::Error),
#[error(transparent)]
VirtualMachineScaleSets_Get(#[from] virtual_machine_scale_sets::get::Error),
#[error(transparent)]
VirtualMachineScaleSets_CreateOrUpdate(#[from] virtual_machine_scale_sets::create_or_update::Error),
#[error(transparent)]
VirtualMachineScaleSets_Delete(#[from] virtual_machine_scale_sets::delete::Error),
#[error(transparent)]
VirtualMachineScaleSets_Deallocate(#[from] virtual_machine_scale_sets::deallocate::Error),
#[error(transparent)]
VirtualMachineScaleSets_DeleteInstances(#[from] virtual_machine_scale_sets::delete_instances::Error),
#[error(transparent)]
VirtualMachineScaleSets_GetInstanceView(#[from] virtual_machine_scale_sets::get_instance_view::Error),
#[error(transparent)]
VirtualMachineScaleSets_List(#[from] virtual_machine_scale_sets::list::Error),
#[error(transparent)]
VirtualMachineScaleSets_ListAll(#[from] virtual_machine_scale_sets::list_all::Error),
#[error(transparent)]
VirtualMachineScaleSets_ListSkus(#[from] virtual_machine_scale_sets::list_skus::Error),
#[error(transparent)]
VirtualMachineScaleSets_PowerOff(#[from] virtual_machine_scale_sets::power_off::Error),
#[error(transparent)]
VirtualMachineScaleSets_Restart(#[from] virtual_machine_scale_sets::restart::Error),
#[error(transparent)]
VirtualMachineScaleSets_Start(#[from] virtual_machine_scale_sets::start::Error),
#[error(transparent)]
VirtualMachineScaleSets_UpdateInstances(#[from] virtual_machine_scale_sets::update_instances::Error),
#[error(transparent)]
VirtualMachineScaleSets_Reimage(#[from] virtual_machine_scale_sets::reimage::Error),
#[error(transparent)]
VirtualMachineScaleSetVMs_Reimage(#[from] virtual_machine_scale_set_v_ms::reimage::Error),
#[error(transparent)]
VirtualMachineScaleSetVMs_Deallocate(#[from] virtual_machine_scale_set_v_ms::deallocate::Error),
#[error(transparent)]
VirtualMachineScaleSetVMs_Get(#[from] virtual_machine_scale_set_v_ms::get::Error),
#[error(transparent)]
VirtualMachineScaleSetVMs_Delete(#[from] virtual_machine_scale_set_v_ms::delete::Error),
#[error(transparent)]
VirtualMachineScaleSetVMs_GetInstanceView(#[from] virtual_machine_scale_set_v_ms::get_instance_view::Error),
#[error(transparent)]
VirtualMachineScaleSetVMs_List(#[from] virtual_machine_scale_set_v_ms::list::Error),
#[error(transparent)]
VirtualMachineScaleSetVMs_PowerOff(#[from] virtual_machine_scale_set_v_ms::power_off::Error),
#[error(transparent)]
VirtualMachineScaleSetVMs_Restart(#[from] virtual_machine_scale_set_v_ms::restart::Error),
#[error(transparent)]
VirtualMachineScaleSetVMs_Start(#[from] virtual_machine_scale_set_v_ms::start::Error),
}
pub mod availability_sets {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
availability_set_name: &str,
subscription_id: &str,
) -> std::result::Result<models::AvailabilitySet, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
availability_set_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AvailabilitySet =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
availability_set_name: &str,
parameters: &models::AvailabilitySet,
subscription_id: &str,
) -> std::result::Result<models::AvailabilitySet, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
availability_set_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AvailabilitySet = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(create_or_update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
availability_set_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
availability_set_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::OperationStatusResponse),
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
expand: Option<&str>,
) -> std::result::Result<models::AvailabilitySetListResult, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/availabilitySets",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AvailabilitySetListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_by_subscription::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_by_subscription {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<models::AvailabilitySetListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AvailabilitySetListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_available_sizes(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
availability_set_name: &str,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineSizeListResult, list_available_sizes::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets/{}/vmSizes",
operation_config.base_path(),
subscription_id,
resource_group_name,
availability_set_name
);
let mut url = url::Url::parse(url_str).map_err(list_available_sizes::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_available_sizes::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_available_sizes::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_available_sizes::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineSizeListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_available_sizes::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_available_sizes::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_available_sizes {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod virtual_machine_extension_images {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
type_: &str,
version: &str,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineExtensionImage, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmextension/types/{}/versions/{}",
operation_config.base_path(),
subscription_id,
location,
publisher_name,
type_,
version
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineExtensionImage =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_types(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
subscription_id: &str,
) -> std::result::Result<Vec<models::VirtualMachineExtensionImage>, list_types::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmextension/types",
operation_config.base_path(),
subscription_id,
location,
publisher_name
);
let mut url = url::Url::parse(url_str).map_err(list_types::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_types::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_types::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_types::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::VirtualMachineExtensionImage> =
serde_json::from_slice(rsp_body).map_err(|source| list_types::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_types::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_types {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_versions(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
type_: &str,
filter: Option<&str>,
top: Option<i32>,
orderby: Option<&str>,
subscription_id: &str,
) -> std::result::Result<Vec<models::VirtualMachineExtensionImage>, list_versions::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmextension/types/{}/versions",
operation_config.base_path(),
subscription_id,
location,
publisher_name,
type_
);
let mut url = url::Url::parse(url_str).map_err(list_versions::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_versions::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
if let Some(orderby) = orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_versions::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_versions::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::VirtualMachineExtensionImage> =
serde_json::from_slice(rsp_body).map_err(|source| list_versions::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_versions::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_versions {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod virtual_machine_extensions {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
vm_extension_name: &str,
expand: Option<&str>,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineExtension, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name,
vm_extension_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineExtension =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
vm_extension_name: &str,
extension_parameters: &models::VirtualMachineExtension,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name,
vm_extension_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(extension_parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineExtension = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineExtension = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(create_or_update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::VirtualMachineExtension),
Created201(models::VirtualMachineExtension),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
vm_extension_name: &str,
extension_parameters: &models::VirtualMachineExtensionUpdate,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineExtension, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name,
vm_extension_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(extension_parameters).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineExtension =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
vm_extension_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/extensions/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name,
vm_extension_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::OperationStatusResponse),
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod virtual_machines {
use super::{models, API_VERSION};
pub async fn get_extensions(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
expand: Option<&str>,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineExtensionsListResult, get_extensions::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/extensions",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(get_extensions::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_extensions::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_extensions::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_extensions::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineExtensionsListResult =
serde_json::from_slice(rsp_body).map_err(|source| get_extensions::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_extensions::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get_extensions {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn capture(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
parameters: &models::VirtualMachineCaptureParameters,
subscription_id: &str,
) -> std::result::Result<capture::Response, capture::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/capture",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(capture::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(capture::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(capture::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(capture::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(capture::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineCaptureResult =
serde_json::from_slice(rsp_body).map_err(|source| capture::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(capture::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(capture::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(capture::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod capture {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::VirtualMachineCaptureResult),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
expand: Option<&str>,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachine, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachine =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
parameters: &models::VirtualMachine,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachine = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachine = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
Err(create_or_update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::VirtualMachine),
Created201(models::VirtualMachine),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::OperationStatusResponse),
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn deallocate(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<deallocate::Response, deallocate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/deallocate",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(deallocate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(deallocate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(deallocate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(deallocate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| deallocate::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(deallocate::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(deallocate::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(deallocate::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod deallocate {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::OperationStatusResponse),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn generalize(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<models::OperationStatusResponse, generalize::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/generalize",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(generalize::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(generalize::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(generalize::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(generalize::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| generalize::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(generalize::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod generalize {
        use super::{models, API_VERSION};
        /// Error type for the `generalize` operation: one variant per failure
        /// stage (URL parse, request build, transport, token acquisition,
        /// (de)serialization), plus `UnexpectedResponse` carrying the
        /// non-success status code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod list {
        use super::{models, API_VERSION};
        /// Error type for the `list` operation: one variant per failure stage,
        /// plus `UnexpectedResponse` carrying the non-success status code and
        /// raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list_all(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineListResult, list_all::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/virtualMachines",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_all::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_all::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_all::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_all::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_all::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_all::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod list_all {
        use super::{models, API_VERSION};
        /// Error type for the `list_all` operation: one variant per failure
        /// stage, plus `UnexpectedResponse` carrying the non-success status
        /// code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list_available_sizes(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineSizeListResult, list_available_sizes::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/vmSizes",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(list_available_sizes::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_available_sizes::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_available_sizes::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_available_sizes::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineSizeListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_available_sizes::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_available_sizes::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod list_available_sizes {
        use super::{models, API_VERSION};
        /// Error type for the `list_available_sizes` operation: one variant
        /// per failure stage, plus `UnexpectedResponse` carrying the
        /// non-success status code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn power_off(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<power_off::Response, power_off::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/powerOff",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(power_off::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(power_off::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(power_off::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(power_off::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| power_off::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(power_off::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(power_off::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(power_off::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod power_off {
        use super::{models, API_VERSION};
        /// Success responses for `power_off`: HTTP 200 carries an
        /// `OperationStatusResponse` body; HTTP 202 has no body.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::OperationStatusResponse),
            Accepted202,
        }
        /// Error type for the `power_off` operation: one variant per failure
        /// stage, plus `UnexpectedResponse` carrying the non-success status
        /// code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn restart(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<restart::Response, restart::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/restart",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(restart::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(restart::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(restart::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(restart::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| restart::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(restart::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(restart::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(restart::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod restart {
        use super::{models, API_VERSION};
        /// Success responses for `restart`: HTTP 200 carries an
        /// `OperationStatusResponse` body; HTTP 202 has no body.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::OperationStatusResponse),
            Accepted202,
        }
        /// Error type for the `restart` operation: one variant per failure
        /// stage, plus `UnexpectedResponse` carrying the non-success status
        /// code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn start(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<start::Response, start::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/start",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(start::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(start::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(start::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(start::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| start::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(start::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(start::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(start::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod start {
        use super::{models, API_VERSION};
        /// Success responses for `start`: HTTP 200 carries an
        /// `OperationStatusResponse` body; HTTP 202 has no body.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::OperationStatusResponse),
            Accepted202,
        }
        /// Error type for the `start` operation: one variant per failure
        /// stage, plus `UnexpectedResponse` carrying the non-success status
        /// code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn redeploy(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_name: &str,
subscription_id: &str,
) -> std::result::Result<redeploy::Response, redeploy::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachines/{}/redeploy",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_name
);
let mut url = url::Url::parse(url_str).map_err(redeploy::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(redeploy::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(redeploy::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(redeploy::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| redeploy::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(redeploy::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(redeploy::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(redeploy::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod redeploy {
        use super::{models, API_VERSION};
        /// Success responses for `redeploy`: HTTP 200 carries an
        /// `OperationStatusResponse` body; HTTP 202 has no body.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::OperationStatusResponse),
            Accepted202,
        }
        /// Error type for the `redeploy` operation: one variant per failure
        /// stage, plus `UnexpectedResponse` carrying the non-success status
        /// code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod virtual_machine_images {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
offer: &str,
skus: &str,
version: &str,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineImage, get::Error> {
let http_client = operation_config.http_client();
let url_str = & format ! ("{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmimage/offers/{}/skus/{}/versions/{}" , operation_config . base_path () , subscription_id , location , publisher_name , offer , skus , version) ;
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineImage =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod get {
        use super::{models, API_VERSION};
        /// Error type for the `get` operation: one variant per failure stage,
        /// plus `UnexpectedResponse` carrying the non-success status code and
        /// raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
offer: &str,
skus: &str,
expand: Option<&str>,
top: Option<i32>,
orderby: Option<&str>,
subscription_id: &str,
) -> std::result::Result<Vec<models::VirtualMachineImageResource>, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmimage/offers/{}/skus/{}/versions",
operation_config.base_path(),
subscription_id,
location,
publisher_name,
offer,
skus
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
if let Some(top) = top {
url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
}
if let Some(orderby) = orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::VirtualMachineImageResource> =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod list {
        use super::{models, API_VERSION};
        /// Error type for the `list` operation: one variant per failure stage,
        /// plus `UnexpectedResponse` carrying the non-success status code and
        /// raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list_offers(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
subscription_id: &str,
) -> std::result::Result<Vec<models::VirtualMachineImageResource>, list_offers::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmimage/offers",
operation_config.base_path(),
subscription_id,
location,
publisher_name
);
let mut url = url::Url::parse(url_str).map_err(list_offers::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_offers::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_offers::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_offers::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::VirtualMachineImageResource> =
serde_json::from_slice(rsp_body).map_err(|source| list_offers::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_offers::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod list_offers {
        use super::{models, API_VERSION};
        /// Error type for the `list_offers` operation: one variant per
        /// failure stage, plus `UnexpectedResponse` carrying the non-success
        /// status code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list_publishers(
operation_config: &crate::OperationConfig,
location: &str,
subscription_id: &str,
) -> std::result::Result<Vec<models::VirtualMachineImageResource>, list_publishers::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(list_publishers::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_publishers::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_publishers::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_publishers::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::VirtualMachineImageResource> = serde_json::from_slice(rsp_body)
.map_err(|source| list_publishers::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_publishers::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod list_publishers {
        use super::{models, API_VERSION};
        /// Error type for the `list_publishers` operation: one variant per
        /// failure stage, plus `UnexpectedResponse` carrying the non-success
        /// status code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list_skus(
operation_config: &crate::OperationConfig,
location: &str,
publisher_name: &str,
offer: &str,
subscription_id: &str,
) -> std::result::Result<Vec<models::VirtualMachineImageResource>, list_skus::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/publishers/{}/artifacttypes/vmimage/offers/{}/skus",
operation_config.base_path(),
subscription_id,
location,
publisher_name,
offer
);
let mut url = url::Url::parse(url_str).map_err(list_skus::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_skus::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_skus::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_skus::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<models::VirtualMachineImageResource> =
serde_json::from_slice(rsp_body).map_err(|source| list_skus::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_skus::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    pub mod list_skus {
        use super::{models, API_VERSION};
        /// Error type for the `list_skus` operation: one variant per failure
        /// stage, plus `UnexpectedResponse` carrying the non-success status
        /// code and raw response body.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod usage {
    use super::{models, API_VERSION};
    /// Lists compute resource usage (quotas) for the given location and
    /// subscription. Returns the deserialized `ListUsagesResult` on HTTP 200;
    /// any other status is surfaced as `list::Error::UnexpectedResponse`
    /// together with the raw response body.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        location: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::ListUsagesResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/usages",
            operation_config.base_path(),
            subscription_id,
            location
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // api-version must be appended before the URL is copied into the builder.
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::ListUsagesResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `list` operation.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // NOTE(review): unused by this GET operation; kept for generator uniformity.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod virtual_machine_sizes {
    use super::{models, API_VERSION};
    /// Lists available virtual machine sizes in the given location.
    /// Returns the deserialized `VirtualMachineSizeListResult` on HTTP 200;
    /// any other status becomes `list::Error::UnexpectedResponse` with the raw body.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        location: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::VirtualMachineSizeListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Compute/locations/{}/vmSizes",
            operation_config.base_path(),
            subscription_id,
            location
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // api-version must be appended before the URL is copied into the builder.
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VirtualMachineSizeListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `list` operation.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // NOTE(review): unused by this GET operation; kept for generator uniformity.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod virtual_machine_scale_sets {
use super::{models, API_VERSION};
    /// Retrieves a single VM scale set by resource group and name.
    /// Returns the deserialized `VirtualMachineScaleSet` on HTTP 200; any other
    /// status becomes `get::Error::UnexpectedResponse` with the raw body.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        vm_scale_set_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::VirtualMachineScaleSet, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // api-version must be appended before the URL is copied into the builder.
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VirtualMachineScaleSet =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(get::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `get` operation.
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // NOTE(review): unused by this GET operation; kept for generator uniformity.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Creates or updates a VM scale set (HTTP PUT) with the given `parameters`
    /// as the JSON request body. Distinguishes the two documented success codes:
    /// 200 (updated) vs 201 (created), each carrying the resulting resource.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        vm_scale_set_name: &str,
        parameters: &models::VirtualMachineScaleSet,
        subscription_id: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // This operation always carries a JSON body, so the content-type is set unconditionally.
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VirtualMachineScaleSet = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::VirtualMachineScaleSet = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(create_or_update::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for the `create_or_update` operation.
    pub mod create_or_update {
        use super::{models, API_VERSION};
        /// Success outcomes: 200 = existing resource updated, 201 = resource created.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::VirtualMachineScaleSet),
            Created201(models::VirtualMachineScaleSet),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes a VM scale set (HTTP DELETE). Maps the three documented
    /// outcomes: 200 (completed, with an `OperationStatusResponse` body),
    /// 202 (accepted, long-running), and 204 (nothing to delete).
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        vm_scale_set_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // api-version must be appended before the URL is copied into the builder.
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationStatusResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(delete::Response::Ok200(rsp_value))
            }
            // 202/204 have no body to deserialize.
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                Err(delete::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for the `delete` operation.
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::OperationStatusResponse),
            Accepted202,
            NoContent204,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // NOTE(review): unused by this body-less operation; kept for generator uniformity.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deallocates a VM scale set (HTTP POST). `vm_instance_i_ds` optionally
    /// restricts the operation to specific instances; when `None`, an empty
    /// body is sent and no content-type header is added. 200 carries an
    /// `OperationStatusResponse`; 202 means the operation was accepted.
    pub async fn deallocate(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        vm_scale_set_name: &str,
        vm_instance_i_ds: Option<&models::VirtualMachineScaleSetVmInstanceIDs>,
        subscription_id: &str,
    ) -> std::result::Result<deallocate::Response, deallocate::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/deallocate",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).map_err(deallocate::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(deallocate::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // content-type is set only when a JSON body is actually present.
        let req_body = if let Some(vm_instance_i_ds) = vm_instance_i_ds {
            req_builder = req_builder.header("content-type", "application/json");
            azure_core::to_json(vm_instance_i_ds).map_err(deallocate::Error::SerializeError)?
        } else {
            bytes::Bytes::from_static(azure_core::EMPTY_BODY)
        };
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(deallocate::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(deallocate::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationStatusResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| deallocate::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(deallocate::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(deallocate::Response::Accepted202),
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(deallocate::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for the `deallocate` operation.
    pub mod deallocate {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::OperationStatusResponse),
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes specific instances within a VM scale set (HTTP POST to
    /// `.../delete`). Unlike `deallocate`, the instance-ID body is required,
    /// so the JSON body and content-type header are always sent.
    /// 200 carries an `OperationStatusResponse`; 202 means accepted.
    pub async fn delete_instances(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        vm_scale_set_name: &str,
        vm_instance_i_ds: &models::VirtualMachineScaleSetVmInstanceRequiredIDs,
        subscription_id: &str,
    ) -> std::result::Result<delete_instances::Response, delete_instances::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/delete",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete_instances::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete_instances::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(vm_instance_i_ds).map_err(delete_instances::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete_instances::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(delete_instances::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationStatusResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| delete_instances::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(delete_instances::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(delete_instances::Response::Accepted202),
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(delete_instances::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for the `delete_instances` operation.
    pub mod delete_instances {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::OperationStatusResponse),
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Fetches the runtime instance view of a VM scale set (HTTP GET on
    /// `.../instanceView`). Returns the deserialized
    /// `VirtualMachineScaleSetInstanceView` on HTTP 200.
    pub async fn get_instance_view(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        vm_scale_set_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::VirtualMachineScaleSetInstanceView, get_instance_view::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/instanceView",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).map_err(get_instance_view::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get_instance_view::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // api-version must be appended before the URL is copied into the builder.
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get_instance_view::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(get_instance_view::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VirtualMachineScaleSetInstanceView = serde_json::from_slice(rsp_body)
                    .map_err(|source| get_instance_view::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(get_instance_view::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `get_instance_view` operation.
    pub mod get_instance_view {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // NOTE(review): unused by this GET operation; kept for generator uniformity.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists all VM scale sets in a resource group. Returns the deserialized
    /// `VirtualMachineScaleSetListResult` on HTTP 200.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::VirtualMachineScaleSetListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets",
            operation_config.base_path(),
            subscription_id,
            resource_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // api-version must be appended before the URL is copied into the builder.
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VirtualMachineScaleSetListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(list::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `list` operation.
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // NOTE(review): unused by this GET operation; kept for generator uniformity.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists all VM scale sets in the subscription (across resource groups).
    /// Returns `VirtualMachineScaleSetListWithLinkResult`, whose paging link
    /// (if any) is part of the deserialized model.
    pub async fn list_all(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
    ) -> std::result::Result<models::VirtualMachineScaleSetListWithLinkResult, list_all::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Compute/virtualMachineScaleSets",
            operation_config.base_path(),
            subscription_id
        );
        let mut url = url::Url::parse(url_str).map_err(list_all::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_all::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // api-version must be appended before the URL is copied into the builder.
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_all::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_all::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VirtualMachineScaleSetListWithLinkResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list_all::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(list_all::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `list_all` operation.
    pub mod list_all {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // NOTE(review): unused by this GET operation; kept for generator uniformity.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists the SKUs available for a VM scale set (HTTP GET on `.../skus`).
    /// Returns the deserialized `VirtualMachineScaleSetListSkusResult` on HTTP 200.
    pub async fn list_skus(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        vm_scale_set_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::VirtualMachineScaleSetListSkusResult, list_skus::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/skus",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_skus::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_skus::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        // api-version must be appended before the URL is copied into the builder.
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_skus::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_skus::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::VirtualMachineScaleSetListSkusResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list_skus::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(list_skus::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Error types for the `list_skus` operation.
    pub mod list_skus {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            // NOTE(review): unused by this GET operation; kept for generator uniformity.
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Powers off (stops) a VM scale set (HTTP POST to `.../poweroff`).
    /// `vm_instance_i_ds` optionally restricts the operation to specific
    /// instances; when `None`, an empty body is sent and no content-type
    /// header is added. 200 carries an `OperationStatusResponse`; 202 means accepted.
    pub async fn power_off(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        vm_scale_set_name: &str,
        vm_instance_i_ds: Option<&models::VirtualMachineScaleSetVmInstanceIDs>,
        subscription_id: &str,
    ) -> std::result::Result<power_off::Response, power_off::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/poweroff",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).map_err(power_off::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(power_off::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // content-type is set only when a JSON body is actually present.
        let req_body = if let Some(vm_instance_i_ds) = vm_instance_i_ds {
            req_builder = req_builder.header("content-type", "application/json");
            azure_core::to_json(vm_instance_i_ds).map_err(power_off::Error::SerializeError)?
        } else {
            bytes::Bytes::from_static(azure_core::EMPTY_BODY)
        };
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(power_off::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(power_off::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationStatusResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| power_off::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(power_off::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(power_off::Response::Accepted202),
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(power_off::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for the `power_off` operation.
    pub mod power_off {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::OperationStatusResponse),
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Restarts a VM scale set (HTTP POST to `.../restart`).
    /// `vm_instance_i_ds` optionally restricts the operation to specific
    /// instances; when `None`, an empty body is sent and no content-type
    /// header is added. 200 carries an `OperationStatusResponse`; 202 means accepted.
    pub async fn restart(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        vm_scale_set_name: &str,
        vm_instance_i_ds: Option<&models::VirtualMachineScaleSetVmInstanceIDs>,
        subscription_id: &str,
    ) -> std::result::Result<restart::Response, restart::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/restart",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            vm_scale_set_name
        );
        let mut url = url::Url::parse(url_str).map_err(restart::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        // A bearer token is attached only when a credential is configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(restart::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        // content-type is set only when a JSON body is actually present.
        let req_body = if let Some(vm_instance_i_ds) = vm_instance_i_ds {
            req_builder = req_builder.header("content-type", "application/json");
            azure_core::to_json(vm_instance_i_ds).map_err(restart::Error::SerializeError)?
        } else {
            bytes::Bytes::from_static(azure_core::EMPTY_BODY)
        };
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(restart::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(restart::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationStatusResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| restart::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(restart::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(restart::Response::Accepted202),
            // Unexpected statuses carry the raw body for caller-side diagnostics.
            status_code => {
                let rsp_body = rsp.body();
                Err(restart::Error::UnexpectedResponse {
                    status_code,
                    body: rsp_body.clone(),
                })
            }
        }
    }
    /// Response and error types for the scale-set `restart` operation.
    pub mod restart {
        use super::{models, API_VERSION};
        /// Successful outcomes of the operation.
        #[derive(Debug)]
        pub enum Response {
            /// 200 OK: body carries the long-running operation's status.
            Ok200(models::OperationStatusResponse),
            /// 202 Accepted: the restart was queued; no body is returned.
            Accepted202,
        }
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code not modeled in `Response`.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn start(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
vm_instance_i_ds: Option<&models::VirtualMachineScaleSetVmInstanceIDs>,
subscription_id: &str,
) -> std::result::Result<start::Response, start::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/start",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).map_err(start::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(start::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = if let Some(vm_instance_i_ds) = vm_instance_i_ds {
req_builder = req_builder.header("content-type", "application/json");
azure_core::to_json(vm_instance_i_ds).map_err(start::Error::SerializeError)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(start::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(start::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| start::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(start::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(start::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(start::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Response and error types for the scale-set `start` operation.
    pub mod start {
        use super::{models, API_VERSION};
        /// Successful outcomes of the operation.
        #[derive(Debug)]
        pub enum Response {
            /// 200 OK: body carries the long-running operation's status.
            Ok200(models::OperationStatusResponse),
            /// 202 Accepted: the start was queued; no body is returned.
            Accepted202,
        }
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code not modeled in `Response`.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn update_instances(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
vm_instance_i_ds: &models::VirtualMachineScaleSetVmInstanceRequiredIDs,
subscription_id: &str,
) -> std::result::Result<update_instances::Response, update_instances::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/manualupgrade",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).map_err(update_instances::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update_instances::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(vm_instance_i_ds).map_err(update_instances::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update_instances::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(update_instances::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse = serde_json::from_slice(rsp_body)
.map_err(|source| update_instances::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update_instances::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update_instances::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(update_instances::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Response and error types for the `update_instances` operation.
    pub mod update_instances {
        use super::{models, API_VERSION};
        /// Successful outcomes of the operation.
        #[derive(Debug)]
        pub enum Response {
            /// 200 OK: body carries the long-running operation's status.
            Ok200(models::OperationStatusResponse),
            /// 202 Accepted: the upgrade was queued; no body is returned.
            Accepted202,
        }
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code not modeled in `Response`.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn reimage(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
subscription_id: &str,
) -> std::result::Result<reimage::Response, reimage::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/reimage",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name
);
let mut url = url::Url::parse(url_str).map_err(reimage::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(reimage::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(reimage::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(reimage::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| reimage::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(reimage::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(reimage::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(reimage::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Response and error types for the scale-set `reimage` operation.
    pub mod reimage {
        use super::{models, API_VERSION};
        /// Successful outcomes of the operation.
        #[derive(Debug)]
        pub enum Response {
            /// 200 OK: body carries the long-running operation's status.
            Ok200(models::OperationStatusResponse),
            /// 202 Accepted: the reimage was queued; no body is returned.
            Accepted202,
        }
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code not modeled in `Response`.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod virtual_machine_scale_set_v_ms {
use super::{models, API_VERSION};
pub async fn reimage(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<reimage::Response, reimage::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/reimage",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).map_err(reimage::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(reimage::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(reimage::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(reimage::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| reimage::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(reimage::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(reimage::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(reimage::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Response and error types for the per-instance `reimage` operation.
    pub mod reimage {
        use super::{models, API_VERSION};
        /// Successful outcomes of the operation.
        #[derive(Debug)]
        pub enum Response {
            /// 200 OK: body carries the long-running operation's status.
            Ok200(models::OperationStatusResponse),
            /// 202 Accepted: the reimage was queued; no body is returned.
            Accepted202,
        }
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code not modeled in `Response`.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn deallocate(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<deallocate::Response, deallocate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/deallocate",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).map_err(deallocate::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(deallocate::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(deallocate::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(deallocate::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| deallocate::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(deallocate::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(deallocate::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(deallocate::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Response and error types for the per-instance `deallocate` operation.
    pub mod deallocate {
        use super::{models, API_VERSION};
        /// Successful outcomes of the operation.
        #[derive(Debug)]
        pub enum Response {
            /// 200 OK: body carries the long-running operation's status.
            Ok200(models::OperationStatusResponse),
            /// 202 Accepted: the deallocation was queued; no body is returned.
            Accepted202,
        }
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code not modeled in `Response`.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineScaleSetVm, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineScaleSetVm =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Error types for the per-instance `get` operation (the success value is
    /// returned directly as `models::VirtualMachineScaleSetVm`).
    pub mod get {
        use super::{models, API_VERSION};
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code other than 200 OK.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Response and error types for the per-instance `delete` operation.
    pub mod delete {
        use super::{models, API_VERSION};
        /// Successful outcomes of the operation.
        #[derive(Debug)]
        pub enum Response {
            /// 200 OK: body carries the long-running operation's status.
            Ok200(models::OperationStatusResponse),
            /// 202 Accepted: the deletion was queued; no body is returned.
            Accepted202,
            /// 204 No Content: nothing to delete; no body is returned.
            NoContent204,
        }
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code not modeled in `Response`.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn get_instance_view(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineScaleSetVmInstanceView, get_instance_view::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/instanceView",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).map_err(get_instance_view::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_instance_view::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_instance_view::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_instance_view::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineScaleSetVmInstanceView = serde_json::from_slice(rsp_body)
.map_err(|source| get_instance_view::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get_instance_view::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Error types for the `get_instance_view` operation (the success value is
    /// returned directly as `models::VirtualMachineScaleSetVmInstanceView`).
    pub mod get_instance_view {
        use super::{models, API_VERSION};
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code other than 200 OK.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn list(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
virtual_machine_scale_set_name: &str,
filter: Option<&str>,
select: Option<&str>,
expand: Option<&str>,
subscription_id: &str,
) -> std::result::Result<models::VirtualMachineScaleSetVmListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualMachines",
operation_config.base_path(),
subscription_id,
resource_group_name,
virtual_machine_scale_set_name
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(select) = select {
url.query_pairs_mut().append_pair("$select", select);
}
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachineScaleSetVmListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum | {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn power_off(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<power_off::Response, power_off::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/poweroff",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).map_err(power_off::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(power_off::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(power_off::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(power_off::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| power_off::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(power_off::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(power_off::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(power_off::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
    /// Response and error types for the per-instance `power_off` operation.
    pub mod power_off {
        use super::{models, API_VERSION};
        /// Successful outcomes of the operation.
        #[derive(Debug)]
        pub enum Response {
            /// 200 OK: body carries the long-running operation's status.
            Ok200(models::OperationStatusResponse),
            /// 202 Accepted: the power-off was queued; no body is returned.
            Accepted202,
        }
        /// Failure modes of the operation, one variant per pipeline stage.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service replied with a status code not modeled in `Response`.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
pub async fn restart(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<restart::Response, restart::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/restart",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).map_err(restart::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(restart::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(restart::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(restart::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| restart::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(restart::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(restart::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(restart::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod restart {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::OperationStatusResponse),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn start(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
vm_scale_set_name: &str,
instance_id: &str,
subscription_id: &str,
) -> std::result::Result<start::Response, start::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/virtualMachineScaleSets/{}/virtualmachines/{}/start",
operation_config.base_path(),
subscription_id,
resource_group_name,
vm_scale_set_name,
instance_id
);
let mut url = url::Url::parse(url_str).map_err(start::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(start::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(start::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(start::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationStatusResponse =
serde_json::from_slice(rsp_body).map_err(|source| start::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(start::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(start::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(start::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod start {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::OperationStatusResponse),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
| Error |
parquet_test.go | package parquet
import (
goparquet "github.com/fraugster/parquet-go"
"os"
"testing"
"github.com/fraugster/parquet-go/parquet"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
const testFile = "../../../test/test2.parquet"
const column = "foo"
func TestReadFile(t *testing.T) {
testFunc := func() {
i, err := FromFile(testFile)
defer func() { _ = i.Close() }()
assert.NoError(t, err)
schema := i.Schema()
assert.Equal(t, 2, len(schema))
{
kind, ok := schema[column]
assert.True(t, ok)
assert.Equal(t, "int64", kind.String())
}
{
kind, ok := schema["bar"]
assert.True(t, ok)
assert.Equal(t, "int32", kind.String()) | }
count := 0
i.Range(func(int, []interface{}) bool {
count++
return false
}, column)
assert.Equal(t, 10000, count)
}
// Enable when you want to create a Parquet file for the test
//initFunc(t, goparquet.WithCompressionCodec(parquet.CompressionCodec_SNAPPY), goparquet.WithCreator("talaria-parquet-unittest"))
testFunc()
}
// Only use if you wish to generate the Parquet file needed for testing
func initFunc(t *testing.T, opts ...goparquet.FileWriterOption) {
_ = os.Mkdir("files", 0755)
wf, err := os.OpenFile(testFile, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0644)
require.NoError(t, err, "creating file failed")
w := goparquet.NewFileWriter(wf, opts...)
fooStore, err := goparquet.NewInt64Store(parquet.Encoding_PLAIN, true, &goparquet.ColumnParameters{})
require.NoError(t, err, "failed to create fooStore")
barStore, err := goparquet.NewInt32Store(parquet.Encoding_PLAIN, true, &goparquet.ColumnParameters{})
require.NoError(t, err, "failed to create barStore")
require.NoError(t, w.AddColumn("foo", goparquet.NewDataColumn(fooStore, parquet.FieldRepetitionType_REQUIRED)))
require.NoError(t, w.AddColumn("bar", goparquet.NewDataColumn(barStore, parquet.FieldRepetitionType_OPTIONAL)))
const (
numRecords = 10000
flushLimit = 1000
)
for idx := 0; idx < numRecords; idx++ {
if idx > 0 && idx%flushLimit == 0 {
require.NoError(t, w.FlushRowGroup(), "%d. AddData failed", idx)
}
require.NoError(t, w.AddData(map[string]interface{}{"foo": int64(idx), "bar": int32(idx)}), "%d. AddData failed", idx)
}
assert.NoError(t, w.Close(), "Close failed")
require.NoError(t, wf.Close())
} | |
predict.go | package controllers
import (
"github.com/Hongbo-Miao/hongbomiao.com/api-go/internal/graphql_server/utils"
"github.com/gin-gonic/gin"
"github.com/rs/zerolog/log"
"net/http"
)
func Predict(c *gin.Context) {
fileHeader, err := c.FormFile("file")
if err != nil {
log.Error().Err(err).Msg("c.FormFile")
c.JSON(http.StatusBadRequest, gin.H{
"error": err.Error(),
})
return
}
prediction, err := utils.GetPrediction(fileHeader)
if err != nil |
c.JSON(http.StatusOK, prediction)
}
| {
log.Error().Err(err).Msg("utils.GetPrediction")
c.JSON(http.StatusInternalServerError, gin.H{
"error": err.Error(),
})
return
} |
ping.rs | use specs::*;
use component::flag::IsPlayer;
use component::time::*;
use types::systemdata::*;
use types::*;
use std::time::{Duration, Instant};
use protocol::server::Ping as ServerPing;
use systems::handlers::game::on_join::SendLogin;
pub struct PingTimerHandler {
lastping: Instant,
}
#[derive(SystemData)]
pub struct PingTimerHandlerData<'a> {
frame: Read<'a, ThisFrame>,
conns: SendToAll<'a>,
clock: ReadClock<'a>,
is_player: ReadStorage<'a, IsPlayer>,
pingdata: WriteStorage<'a, PingData>,
associated: ReadStorage<'a, AssociatedConnection>,
}
impl PingTimerHandler {
pub fn new() -> Self {
Self {
lastping: Instant::now(),
}
}
}
impl<'a> System<'a> for PingTimerHandler {
type SystemData = PingTimerHandlerData<'a>;
fn run(&mut self, mut data: Self::SystemData) |
}
use dispatch::SystemInfo;
use handlers::OnCloseHandler;
use systems::TimerHandler;
impl SystemInfo for PingTimerHandler {
type Dependencies = (OnCloseHandler, TimerHandler, SendLogin);
fn new() -> Self {
Self::new()
}
fn name() -> &'static str {
concat!(module_path!(), "::", line!())
}
}
| {
if data.frame.0 < self.lastping + Duration::from_secs(5) {
return;
}
self.lastping = data.frame.0;
let now = Instant::now();
let clock = data.clock.get();
let ref mut pingdata = data.pingdata;
let ref associated = data.associated;
let ref conns = data.conns;
(pingdata, associated, data.is_player.mask())
.join()
.for_each(|(pingdata, assoc, ..)| {
let ping = pingdata.new_ping(now);
conns.send_to(
assoc.0,
ServerPing {
clock,
num: ping.idx,
},
);
});
} |
InferenceRange.py | """
Created on June 21, 2018
@author: Moritz
"""
import numpy as np
from spn.algorithms.Inference import add_node_likelihood
from spn.experiments.AQP.leaves.static.StaticNumeric import StaticNumeric
def static_likelihood_range(node, ranges, dtype=np.float64, **kwargs):
|
def _compute_probability_for_range(node, interval):
if len(interval) == 1:
if node.val == interval[0]:
return 1
else:
return 0
else:
lower = interval[0]
higher = interval[1]
if lower <= node.val and node.val <= higher:
return 1
else:
return 0
def add_static_inference_range_support():
add_node_likelihood(StaticNumeric, static_likelihood_range)
| assert len(node.scope) == 1, node.scope
probs = np.ones((ranges.shape[0], 1), dtype=dtype)
ranges = ranges[:, node.scope[0]]
for i, rang in enumerate(ranges):
# Skip if no range is specified aka use a log-probability of 0 for that instance
if rang is None:
continue
# Skip if no values for the range are provided
if rang.is_impossible():
probs[i] = 0
# Compute the sum of the probability of all possible values
probs[i] = sum([_compute_probability_for_range(node, interval) for interval in rang.get_ranges()])
return probs |
index.js | import ColorPicker from "./components/ColorPicker"; | export default ColorPicker; |
|
reducers.js | export default function | (state = 0, action) {
switch (action.type) {
case 'INCREMENT':
return state + 1
case 'INCREMENT_IF_ODD':
return (state % 2 !== 0) ? state + 1 : state
case 'DECREMENT':
return state - 1
case 'PRODUCTS_REQUESTED_REAL':
console.log(action.products)
default:
return state
}
}
| counter |
test_flord_g_ctdbp_p_dcl_telemetered_driver.py | import os
import unittest | from mi.dataset.driver.flord_g.ctdbp_p.dcl.flord_g_ctdbp_p_dcl_telemetered_driver import parse
_author__ = 'jeff roy'
log = get_logger()
class DriverTest(unittest.TestCase):
def test_one(self):
source_file_path = os.path.join(RESOURCE_PATH, 'ctdbp01_20150804_061734.DAT')
particle_data_handler = ParticleDataHandler()
particle_data_handler = parse(None, source_file_path, particle_data_handler)
log.debug("SAMPLES: %s", particle_data_handler._samples)
log.debug("FAILURE: %s", particle_data_handler._failure)
self.assertEquals(particle_data_handler._failure, False)
if __name__ == '__main__':
test = DriverTest('test_one')
test.test_one() |
from mi.core.log import get_logger
from mi.dataset.dataset_driver import ParticleDataHandler
from mi.dataset.driver.ctdbp_p.dcl.resource import RESOURCE_PATH |
GenerateOrganizationsAccessReportCommand.ts | import * as __aws_sdk_middleware_stack from "@aws-sdk/middleware-stack";
import * as __aws_sdk_types from "@aws-sdk/types";
import { GenerateOrganizationsAccessReport } from "../model/operations/GenerateOrganizationsAccessReport";
import { InputTypesUnion } from "../types/InputTypesUnion";
import { OutputTypesUnion } from "../types/OutputTypesUnion";
import { GenerateOrganizationsAccessReportInput } from "../types/GenerateOrganizationsAccessReportInput";
import { GenerateOrganizationsAccessReportOutput } from "../types/GenerateOrganizationsAccessReportOutput";
import { IAMResolvedConfiguration } from "../IAMConfiguration";
export * from "../types/GenerateOrganizationsAccessReportInput";
export * from "../types/GenerateOrganizationsAccessReportOutput";
export * from "../types/GenerateOrganizationsAccessReportExceptionsUnion";
export class |
implements
__aws_sdk_types.Command<
InputTypesUnion,
GenerateOrganizationsAccessReportInput,
OutputTypesUnion,
GenerateOrganizationsAccessReportOutput,
IAMResolvedConfiguration,
Blob
> {
readonly model = GenerateOrganizationsAccessReport;
readonly middlewareStack = new __aws_sdk_middleware_stack.MiddlewareStack<
GenerateOrganizationsAccessReportInput,
GenerateOrganizationsAccessReportOutput,
Blob
>();
constructor(readonly input: GenerateOrganizationsAccessReportInput) {}
resolveMiddleware(
clientStack: __aws_sdk_middleware_stack.MiddlewareStack<
InputTypesUnion,
OutputTypesUnion,
Blob
>,
configuration: IAMResolvedConfiguration
): __aws_sdk_types.Handler<
GenerateOrganizationsAccessReportInput,
GenerateOrganizationsAccessReportOutput
> {
const { handler } = configuration;
const stack = clientStack.concat(this.middlewareStack);
const handlerExecutionContext: __aws_sdk_types.HandlerExecutionContext = {
logger: {} as any,
model: this.model
};
return stack.resolve(
handler<
GenerateOrganizationsAccessReportInput,
GenerateOrganizationsAccessReportOutput
>(handlerExecutionContext),
handlerExecutionContext
);
}
}
| GenerateOrganizationsAccessReportCommand |
main.rs | use std::{env, io, process, time::Duration};
use actix_web::{
error::InternalError, guard, http::StatusCode, web, App, HttpResponse, HttpResponseBuilder,
HttpServer, ResponseError,
};
use client::ClientConfig;
use serde::Serialize;
use service::{
auth::{Authentication, Config, Scope},
health::Health,
search::{Search, UPDATE_INTERVAL},
};
use thiserror::Error;
use tokio::sync::Mutex;
mod client;
mod service;
#[cfg(feature = "jemalloc")]
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
const PORT: u16 = 8080;
#[derive(Debug, Error)]
pub enum Error {
#[error("Missing environment variable: {0}")]
MissingEnvVar(String),
#[error("API lib error: {0}")]
APILibrary(#[from] tarkov_database_rs::Error),
#[error("IO error: {0}")]
IOError(#[from] io::Error),
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct StatusResponse {
message: String,
code: u16,
}
impl Into<HttpResponse> for StatusResponse {
fn into(self) -> HttpResponse {
HttpResponseBuilder::new(StatusCode::from_u16(self.code).unwrap()).json(web::Json(self))
}
}
impl Into<actix_web::Error> for StatusResponse {
fn into(self) -> actix_web::Error {
InternalError::from_response("", self.into()).into()
}
}
impl<T: ResponseError> From<T> for StatusResponse {
fn from(err: T) -> Self {
Self {
message: err.to_string(),
code: err.status_code().as_u16(),
}
}
}
#[actix_web::main]
async fn main() -> io::Result<()> {
let port = env::var("PORT").unwrap_or_else(|_| PORT.to_string());
let bind = format!("127.0.0.1:{}", port);
if env::var("RUST_LOG").is_err() {
env::set_var("RUST_LOG", "info");
}
env_logger::init();
let auth_config = match Config::from_env() {
Ok(c) => c,
Err(e) => {
eprintln!("Error while creating auth config: {}", e);
process::exit(2);
}
};
let client = match ClientConfig::from_env() {
Ok(c) => c,
Err(e) => |
};
let update_interval = Duration::from_secs(
env::var("UPDATE_INTERVAL")
.unwrap_or_default()
.parse()
.unwrap_or(UPDATE_INTERVAL),
);
let (index, status) =
match Search::new_state(client.clone().build().unwrap(), update_interval).await {
Ok(s) => s,
Err(e) => {
eprintln!("Error while creating index: {}", e);
process::exit(2);
}
};
let server = HttpServer::new(move || {
let client = Mutex::new(client.clone().build().unwrap());
App::new()
.app_data(
web::QueryConfig::default()
.error_handler(|err, _| StatusResponse::from(err).into()),
)
.app_data(
web::JsonConfig::default().error_handler(|err, _| StatusResponse::from(err).into()),
)
.app_data(auth_config.clone())
.service(
web::resource("/search")
.app_data(index.clone())
.wrap(Authentication::with_scope(Scope::Search))
.default_service(web::route().to(HttpResponse::MethodNotAllowed))
.route(web::get().to(Search::get_handler)),
)
.service(
web::scope("/token")
.app_data(client)
.default_service(web::route().to(HttpResponse::MethodNotAllowed))
.service(
web::resource("")
.guard(guard::Get())
.to(Authentication::get_handler),
)
.service(
web::resource("")
.guard(guard::Post())
.wrap(Authentication::with_scope(Scope::Token))
.to(Authentication::post_handler),
),
)
.service(
web::resource("/health")
.app_data(status.clone())
.wrap(Authentication::new())
.default_service(web::route().to(HttpResponse::MethodNotAllowed))
.route(web::get().to(Health::get_handler)),
)
});
server.bind(bind)?.run().await
}
| {
eprintln!("Error while creating client config: {}", e);
process::exit(2);
} |
ports.py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class Ports(Base):
"""
The Ports class encapsulates a list of ports resources that are managed by the system.
A list of resources can be retrieved from the server using the Ports.find() method.
"""
__slots__ = ()
_SDM_NAME = 'ports'
_SDM_ATT_MAP = {
'Description': 'description',
'IsAvailable': 'isAvailable',
'IsBusy': 'isBusy',
'IsLinkUp': 'isLinkUp',
'Location': 'location',
'Owner': 'owner',
'ResourceMode': 'resourceMode',
}
def __init__(self, parent):
super(Ports, self).__init__(parent)
@property
def | (self):
"""
Returns
-------
- str:
"""
return self._get_attribute(self._SDM_ATT_MAP['Description'])
@property
def IsAvailable(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['IsAvailable'])
@property
def IsBusy(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['IsBusy'])
@property
def IsLinkUp(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['IsLinkUp'])
@property
def Location(self):
"""
Returns
-------
- str:
"""
return self._get_attribute(self._SDM_ATT_MAP['Location'])
@property
def Owner(self):
"""
Returns
-------
- str:
"""
return self._get_attribute(self._SDM_ATT_MAP['Owner'])
@property
def ResourceMode(self):
"""
Returns
-------
- str(normal | tenGig | fortyGig | singleMode | dualMode | hundredGigNonFanOut | fortyGigFanOut | threeByTenGigFanOut | eightByTenGigFanOut | fourByTwentyFiveGigNonFanOut | twoByTwentyFiveGigNonFanOut | oneByFiftyGigNonFanOut | fortyGigNonFanOut | oneByTenGigFanOut | fourByTenGigFanOut | incompatibleMode | hundredGigCapturePlayback | fortyGigCapturePlayback | novusHundredGigNonFanOut | novusFourByTwentyFiveGigNonFanOut | novusTwoByFiftyGigNonFanOut | novusOneByFortyGigNonFanOut | novusFourByTenGigNonFanOut | krakenOneByFourHundredGigNonFanOut | krakenOneByTwoHundredGigNonFanOut | krakenTwoByOneHundredGigFanOut | krakenFourByFiftyGigFanOut | aresOneOneByFourHundredGigNonFanOut | aresOneTwoByTwoHundredGigFanOut | aresOneFourByOneHundredGigFanOut | aresOneFourByOneHundredGigMacSecFanOut | aresOneEightByFiftyGigFanOut | uhdOneHundredEightByHundredGigNonFanOut | uhdOneHundredEightByFortyGigNonFanOut | uhdOneHundredSixteenByFiftyGigFanOut | uhdOneHundredThirtyTwoByTwentyFiveGigFanOut | uhdOneHundredThirtyTwoByTenGigFanOut | notApplicable):
"""
return self._get_attribute(self._SDM_ATT_MAP['ResourceMode'])
def find(self, Description=None, IsAvailable=None, IsBusy=None, IsLinkUp=None, Location=None, Owner=None, ResourceMode=None):
"""Finds and retrieves ports resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve ports resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all ports resources from the server.
Args
----
- Description (str):
- IsAvailable (bool):
- IsBusy (bool):
- IsLinkUp (bool):
- Location (str):
- Owner (str):
- ResourceMode (str(normal | tenGig | fortyGig | singleMode | dualMode | hundredGigNonFanOut | fortyGigFanOut | threeByTenGigFanOut | eightByTenGigFanOut | fourByTwentyFiveGigNonFanOut | twoByTwentyFiveGigNonFanOut | oneByFiftyGigNonFanOut | fortyGigNonFanOut | oneByTenGigFanOut | fourByTenGigFanOut | incompatibleMode | hundredGigCapturePlayback | fortyGigCapturePlayback | novusHundredGigNonFanOut | novusFourByTwentyFiveGigNonFanOut | novusTwoByFiftyGigNonFanOut | novusOneByFortyGigNonFanOut | novusFourByTenGigNonFanOut | krakenOneByFourHundredGigNonFanOut | krakenOneByTwoHundredGigNonFanOut | krakenTwoByOneHundredGigFanOut | krakenFourByFiftyGigFanOut | aresOneOneByFourHundredGigNonFanOut | aresOneTwoByTwoHundredGigFanOut | aresOneFourByOneHundredGigFanOut | aresOneFourByOneHundredGigMacSecFanOut | aresOneEightByFiftyGigFanOut | uhdOneHundredEightByHundredGigNonFanOut | uhdOneHundredEightByFortyGigNonFanOut | uhdOneHundredSixteenByFiftyGigFanOut | uhdOneHundredThirtyTwoByTwentyFiveGigFanOut | uhdOneHundredThirtyTwoByTenGigFanOut | notApplicable)):
Returns
-------
- self: This instance with matching ports resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of ports data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the ports resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def ClearOwnership(self):
"""Executes the clearOwnership operation on the server.
Clears ownership on a list of location ports.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('clearOwnership', payload=payload, response_object=None)
| Description |
add-release.component.spec.ts | import { async, ComponentFixture, TestBed } from "@angular/core/testing"; |
describe("AddReleaseComponent", () => {
let component: AddReleaseComponent;
let fixture: ComponentFixture<AddReleaseComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ AddReleaseComponent ]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(AddReleaseComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
xit("should be created", () => {
expect(component).toBeTruthy();
});
}); |
import { AddReleaseComponent } from "./add-release.component"; |
test_tensorflow_model_export.py | # pep8: disable=E501
from __future__ import print_function
import collections
import os
import pandas
import shutil
import unittest
import pandas as pd
import sklearn.datasets as datasets
import tensorflow as tf
from mlflow import tensorflow, pyfunc
from mlflow import tracking
from mlflow.utils.file_utils import TempDir
class TestModelExport(unittest.TestCase):
def | (self, feature_spec, tmp, estimator, df):
"""
This functions handles exporting, logging, loading back, and predicting on an estimator for
testing purposes.
"""
receiver_fn = tf.estimator.export.build_raw_serving_input_receiver_fn(feature_spec)
saved_estimator_path = tmp.path("model")
os.makedirs(saved_estimator_path)
# Saving TensorFlow model.
saved_estimator_path = estimator.export_savedmodel(saved_estimator_path,
receiver_fn).decode("utf-8")
# Logging the TensorFlow model just saved.
tensorflow.log_saved_model(saved_model_dir=saved_estimator_path,
signature_def_key="predict",
artifact_path=tmp.path("hello"))
# Loading the saved TensorFlow model as a pyfunc.
x = pyfunc.load_pyfunc(saved_estimator_path)
# Predicting on the dataset using the pyfunc.
return x.predict(df)
def test_log_saved_model(self):
# This tests model logging capabilities on the sklearn.iris dataset.
iris = datasets.load_iris()
X = iris.data[:, :2] # we only take the first two features.
y = iris.target
trainingFeatures = {}
for i in range(0, 2):
# TensorFlow is fickle about feature names, so we remove offending characters
iris.feature_names[i] = iris.feature_names[i].replace(" ", "")
iris.feature_names[i] = iris.feature_names[i].replace("(", "")
iris.feature_names[i] = iris.feature_names[i].replace(")", "")
trainingFeatures[iris.feature_names[i]] = iris.data[:, i:i+1]
tf_feat_cols = []
feature_names = iris.feature_names[:2]
# Creating TensorFlow-specific numeric columns for input.
for col in iris.feature_names[:2]:
tf_feat_cols.append(tf.feature_column.numeric_column(col))
# Creating input training function.
input_train = tf.estimator.inputs.numpy_input_fn(trainingFeatures,
y,
shuffle=False,
batch_size=1)
# Creating Deep Neural Network Regressor.
estimator = tf.estimator.DNNRegressor(feature_columns=tf_feat_cols,
hidden_units=[1])
# Training and creating expected predictions on training dataset.
estimator.train(input_train, steps=10)
# Saving the estimator's prediction on the training data; assume the DNNRegressor
# produces a single output column named 'predictions'
pred_col = "predictions"
estimator_preds = [s[pred_col] for s in estimator.predict(input_train)]
estimator_preds_df = pd.DataFrame({pred_col: estimator_preds})
old_tracking_uri = tracking.get_tracking_uri()
# should_start_run tests whether or not calling log_model() automatically starts a run.
for should_start_run in [False, True]:
with TempDir(chdr=True, remove_on_exit=True) as tmp:
try:
# Creating dict of features names (str) to placeholders (tensors)
feature_spec = {}
for name in feature_names:
feature_spec[name] = tf.placeholder("float", name=name, shape=[150])
tracking.set_tracking_uri("test")
if should_start_run:
tracking.start_run()
pyfunc_preds_df = self.helper(feature_spec, tmp, estimator,
pandas.DataFrame(data=X, columns=feature_names))
# Asserting that the loaded model predictions are as expected.
assert estimator_preds_df.equals(pyfunc_preds_df)
finally:
# Restoring the old logging location.
tracking.end_run()
tracking.set_tracking_uri(old_tracking_uri)
def test_categorical_columns(self):
"""
This tests logging capabilities on datasets with categorical columns.
See https://github.com/tensorflow/tensorflow/blob/master/tensorflow/examples/get_started/\
regression/imports85.py
for reference code.
"""
with TempDir(chdr=False, remove_on_exit=True) as tmp:
path = os.path.abspath("tests/data/uci-autos-imports-85.data")
# Order is important for the csv-readers, so we use an OrderedDict here.
defaults = collections.OrderedDict([
("body-style", [""]),
("curb-weight", [0.0]),
("highway-mpg", [0.0]),
("price", [0.0])
])
types = collections.OrderedDict((key, type(value[0]))
for key, value in defaults.items())
df = pandas.read_csv(path, names=types.keys(), dtype=types, na_values="?")
df = df.dropna()
# Extract the label from the features dataframe.
y_train = df.pop("price")
# Creating the input training function required.
trainingFeatures = {}
for i in df:
trainingFeatures[i] = df[i].values
input_train = tf.estimator.inputs.numpy_input_fn(trainingFeatures,
y_train.values,
shuffle=False,
batch_size=1)
# Creating the feature columns required for the DNNRegressor.
body_style_vocab = ["hardtop", "wagon", "sedan", "hatchback", "convertible"]
body_style = tf.feature_column.categorical_column_with_vocabulary_list(
key="body-style", vocabulary_list=body_style_vocab)
feature_columns = [
tf.feature_column.numeric_column(key="curb-weight"),
tf.feature_column.numeric_column(key="highway-mpg"),
# Since this is a DNN model, convert categorical columns from sparse
# to dense.
# Wrap them in an `indicator_column` to create a
# one-hot vector from the input.
tf.feature_column.indicator_column(body_style)
]
# Build a DNNRegressor, with 2x20-unit hidden layers, with the feature columns
# defined above as input.
estimator = tf.estimator.DNNRegressor(
hidden_units=[20, 20], feature_columns=feature_columns)
# Training the estimator.
estimator.train(input_fn=input_train, steps=10)
# Saving the estimator's prediction on the training data; assume the DNNRegressor
# produces a single output column named 'predictions'
pred_col = "predictions"
estimator_preds = [s[pred_col] for s in estimator.predict(input_train)]
estimator_preds_df = pd.DataFrame({pred_col: estimator_preds})
# Setting the logging such that it is in the temp folder and deleted after the test.
old_tracking_dir = tracking.get_tracking_uri()
tracking_dir = os.path.abspath(tmp.path("mlruns"))
tracking.set_tracking_uri("file://%s" % tracking_dir)
tracking.start_run()
try:
# Creating dict of features names (str) to placeholders (tensors)
feature_spec = {}
feature_spec["body-style"] = tf.placeholder("string",
name="body-style",
shape=[None])
feature_spec["curb-weight"] = tf.placeholder("float",
name="curb-weight",
shape=[None])
feature_spec["highway-mpg"] = tf.placeholder("float",
name="highway-mpg",
shape=[None])
pyfunc_preds_df = self.helper(feature_spec, tmp, estimator, df)
# Asserting that the loaded model predictions are as expected. Allow for some
# imprecision as this is expected with TensorFlow.
pandas.testing.assert_frame_equal(
pyfunc_preds_df, estimator_preds_df, check_less_precise=6)
finally:
# Restoring the old logging location.
tracking.end_run()
tracking.set_tracking_uri(old_tracking_dir)
| helper |
candidate-notification-form.component.ts | import { Component, OnDestroy, Input, ViewChild } from '@angular/core';
import { FormGroup } from '@angular/forms';
import { ICandidateInterview, ICandidate } from '@leano/contracts';
import { CandidateEmailComponent } from './candidate-email/candidate-email.component';
@Component({
selector: 'ga-candidate-notification-form',
templateUrl: 'candidate-notification-form.component.html',
styleUrls: ['candidate-notification-form.component.scss']
})
export class CandidateNotificationFormComponent implements OnDestroy {
@Input() interview: ICandidateInterview;
@Input() selectedCandidate: ICandidate;
@Input() employees: any[];
candidateForm: FormGroup;
interviewerForm: FormGroup;
isCandidateNotification = false;
isInterviewerNotification = false;
@ViewChild('emailCandidateForm')
emailCandidateForm: CandidateEmailComponent; |
@ViewChild('emailInterviewerForm')
emailInterviewerForm: CandidateEmailComponent;
constructor() {}
notification() {
if (this.emailCandidateForm) {
this.emailCandidateForm.loadFormData();
}
if (this.emailInterviewerForm) {
this.emailInterviewerForm.loadFormData();
}
}
checkedCandidate(checked: boolean) {
this.isCandidateNotification = checked;
}
checkedInterviewer(checked: boolean) {
this.isInterviewerNotification = checked;
}
ngOnDestroy() {}
} | |
function_target.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
annotations::Annotations,
borrow_analysis, livevar_analysis, reaching_def_analysis,
stackless_bytecode::{AttrId, Bytecode, Operation, SpecBlockId, TempIndex},
};
use itertools::Itertools;
use move_model::{
ast::{Exp, Spec},
model::{FunId, FunctionEnv, GlobalEnv, Loc, ModuleEnv, QualifiedId, StructId, TypeParameter},
symbol::{Symbol, SymbolPool},
ty::{Type, TypeDisplayContext},
};
use std::{
cell::RefCell,
collections::{BTreeMap, BTreeSet},
fmt,
};
use vm::file_format::CodeOffset;
/// A FunctionTarget is a drop-in replacement for a FunctionEnv which allows to rewrite
/// and analyze bytecode and parameter/local types. It encapsulates a FunctionEnv and information
/// which can be rewritten using the `FunctionTargetsHolder` data structure.
pub struct FunctionTarget<'env> {
pub func_env: &'env FunctionEnv<'env>,
pub data: &'env FunctionData,
// Used for debugging and testing, containing any attached annotation formatters.
annotation_formatters: RefCell<Vec<Box<AnnotationFormatter>>>,
}
impl<'env> Clone for FunctionTarget<'env> {
fn clone(&self) -> Self {
// Annotation formatters are transient and forgotten on clone, so this is a cheap
// handle.
Self {
func_env: self.func_env,
data: self.data,
annotation_formatters: RefCell::new(vec![]),
}
}
}
/// Holds the owned data belonging to a FunctionTarget, which can be rewritten using
/// the `FunctionTargetsHolder::rewrite` method.
#[derive(Debug)]
pub struct FunctionData {
/// The bytecode.
pub code: Vec<Bytecode>,
/// The locals, including parameters.
pub local_types: Vec<Type>,
/// The return types.
pub return_types: Vec<Type>,
/// TODO(wrwg): document what this is for
pub param_proxy_map: BTreeMap<usize, usize>,
/// A map from mut ref input parameters to the generated output parameters.
pub ref_param_proxy_map: BTreeMap<usize, usize>,
/// A map from mut ref output parameters to the input parameters.
pub ref_param_return_map: BTreeMap<usize, usize>,
/// The set of global resources acquired by this function.
pub acquires_global_resources: Vec<StructId>,
/// A map from byte code attribute to source code location.
pub locations: BTreeMap<AttrId, Loc>,
/// Annotations associated with this function.
pub annotations: Annotations,
/// Map of spec block ids as given by the source, to the code offset in the original
/// bytecode. Those spec block's content is found at
/// `func_env.get_specification_on_impl(code_offset)`.
pub spec_blocks_on_impl: BTreeMap<SpecBlockId, CodeOffset>,
/// A map from local names to temp indices in code.
pub name_to_index: BTreeMap<Symbol, usize>,
/// A cache of targets modified by this function.
pub modify_targets: BTreeMap<QualifiedId<StructId>, Vec<Exp>>,
}
pub struct FunctionDataBuilder<'a> {
pub data: &'a mut FunctionData,
pub next_attr_index: usize,
}
impl<'env> FunctionTarget<'env> {
pub fn new(
func_env: &'env FunctionEnv<'env>,
data: &'env FunctionData,
) -> FunctionTarget<'env> {
FunctionTarget {
func_env,
data,
annotation_formatters: RefCell::new(vec![]),
}
}
/// Returns the name of this function.
pub fn get_name(&self) -> Symbol {
self.func_env.get_name()
}
/// Gets the id of this function.
pub fn get_id(&self) -> FunId {
self.func_env.get_id()
}
/// Shortcut for accessing the symbol pool.
pub fn symbol_pool(&self) -> &SymbolPool {
self.func_env.module_env.symbol_pool()
}
/// Shortcut for accessing the module env of this function.
pub fn module_env(&self) -> &ModuleEnv {
&self.func_env.module_env
}
/// Shortcut for accessing the global env of this function.
pub fn global_env(&self) -> &GlobalEnv {
self.func_env.module_env.env
}
/// Returns the location of this function.
pub fn get_loc(&self) -> Loc {
self.func_env.get_loc()
}
/// Returns the location of the bytecode with the given attribute.
pub fn get_bytecode_loc(&self, attr_id: AttrId) -> Loc {
if let Some(loc) = self.data.locations.get(&attr_id) {
loc.clone()
} else {
self.get_loc()
}
}
/// Returns true if this function is native.
pub fn is_native(&self) -> bool {
self.func_env.is_native()
}
/// Returns true if this function is opaque.
pub fn is_opaque(&self) -> bool {
self.func_env.is_opaque()
}
/// Returns true if this function is public.
pub fn is_public(&self) -> bool {
self.func_env.is_public()
}
/// Returns true if this function mutates any references (i.e. has &mut parameters).
pub fn is_mutating(&self) -> bool {
self.func_env.is_mutating()
}
/// Returns the type parameters associated with this function.
pub fn get_type_parameters(&self) -> Vec<TypeParameter> {
self.func_env.get_type_parameters()
}
/// Returns return type at given index.
pub fn get_return_type(&self, idx: usize) -> &Type {
&self.data.return_types[idx]
}
/// Returns return types of this function.
pub fn get_return_types(&self) -> &[Type] {
&self.data.return_types
}
/// Returns the number of return values of this function.
pub fn get_return_count(&self) -> usize {
self.data.return_types.len()
}
pub fn get_parameter_count(&self) -> usize {
self.func_env.get_parameter_count()
}
/// Get the name to be used for a local. If the local is an argument, use that for naming,
/// otherwise generate a unique name.
pub fn get_local_name(&self, idx: usize) -> Symbol {
self.func_env.get_local_name(idx)
}
/// Get the index corresponding to a local name
pub fn get_local_index(&self, name: Symbol) -> Option<&usize> {
self.data.name_to_index.get(&name)
}
/// Gets the number of locals of this function, including parameters.
pub fn get_local_count(&self) -> usize {
self.data.local_types.len()
}
/// Gets the number of user declared locals of this function, excluding locals which have
/// been introduced by transformations.
pub fn get_user_local_count(&self) -> usize {
self.func_env.get_local_count()
}
/// Returns true if the index is for a temporary, not user declared local.
pub fn is_temporary(&self, idx: usize) -> bool {
self.func_env.is_temporary(idx)
}
/// Gets the type of the local at index. This must use an index in the range as determined by
/// `get_local_count`.
pub fn get_local_type(&self, idx: usize) -> &Type {
&self.data.local_types[idx]
}
/// Returns specification associated with this function.
pub fn get_spec(&'env self) -> &'env Spec {
self.func_env.get_spec()
}
/// Returns specification conditions associated with this function at spec block id.
pub fn get_spec_on_impl(&'env self, block_id: SpecBlockId) -> &'env Spec {
let code_offset = self
.data
.spec_blocks_on_impl
.get(&block_id)
.expect("spec block defined");
self.func_env
.get_spec()
.on_impl
.get(code_offset)
.expect("given spec block defined")
}
/// Returns the value of a boolean pragma for this function. This first looks up a
/// pragma in this function, then the enclosing module, and finally uses the provided default.
/// property
pub fn is_pragma_true(&self, name: &str, default: impl FnOnce() -> bool) -> bool {
self.func_env.is_pragma_true(name, default)
}
/// Gets the bytecode.
pub fn get_bytecode(&self) -> &[Bytecode] {
&self.data.code
}
/// Gets annotations.
pub fn get_annotations(&self) -> &Annotations {
&self.data.annotations
}
/// Gets acquired resources
pub fn get_acquires_global_resources(&self) -> &[StructId] {
&self.data.acquires_global_resources
}
/// Gets index of return parameter for a reference input parameter
pub fn get_return_index(&self, idx: usize) -> Option<&usize> {
self.data.ref_param_return_map.get(&idx)
}
/// For a return index, return the reference input parameter. Inverse of
/// `get_return_index`.
pub fn get_input_for_return_index(&self, idx: usize) -> Option<&usize> {
// We do a brute force linear search. This may need to be changed if we are dealing
// with truly large (like generated) parameter lists.
for (ref_idx, ret_idx) in &self.data.ref_param_return_map {
if *ret_idx == idx {
return Some(ref_idx);
}
}
None
}
/// TODO(wrwg): better document what this does, it seems to be related to loop invariants.
pub fn get_proxy_index(&self, idx: usize) -> Option<&usize> {
self.data.param_proxy_map.get(&idx)
}
/// Gets index of mutable proxy variable for an input ref parameter
pub fn get_ref_proxy_index(&self, idx: usize) -> Option<&usize> {
self.data.ref_param_proxy_map.get(&idx)
}
/// Reverse of `get_ref_proxy_index`.
pub fn get_reverse_ref_proxy_index(&self, idx: usize) -> Option<&usize> {
// We do a brute force linear search.
for (ref_idx, proxy_idx) in &self.data.ref_param_proxy_map {
if *proxy_idx == idx {
return Some(ref_idx);
}
}
None
}
/// Returns true if this is an unchecked parameter. Such a parameter (currently) stems
/// from a `&mut` parameter in Move which has been converted to in/out parameters in the
/// transformation pipeline, provided this is a private function.
pub fn is_unchecked_param(&self, idx: TempIndex) -> bool {
(!self.is_public() || !self.call_ends_lifetime()) && self.get_ref_proxy_index(idx).is_some()
}
/// Returns whether a call to this function ends lifetime of input references
pub fn call_ends_lifetime(&self) -> bool {
self.is_public() && self.get_return_types().iter().all(|ty| !ty.is_reference())
}
/// Gets modify targets for a type
pub fn get_modify_targets_for_type(&self, ty: &QualifiedId<StructId>) -> Option<&Vec<Exp>> {
self.get_modify_targets().get(ty)
}
/// Gets all modify targets | }
impl FunctionData {
/// Creates new function target data.
pub fn new(
func_env: &FunctionEnv<'_>,
code: Vec<Bytecode>,
local_types: Vec<Type>,
return_types: Vec<Type>,
locations: BTreeMap<AttrId, Loc>,
acquires_global_resources: Vec<StructId>,
given_spec_blocks: BTreeMap<SpecBlockId, CodeOffset>,
) -> Self {
let name_to_index = (0..func_env.get_local_count())
.map(|idx| (func_env.get_local_name(idx), idx))
.collect();
let modify_targets = func_env.get_modify_targets();
FunctionData {
code,
local_types,
return_types,
param_proxy_map: Default::default(),
ref_param_proxy_map: Default::default(),
ref_param_return_map: Default::default(),
acquires_global_resources,
locations,
annotations: Default::default(),
spec_blocks_on_impl: given_spec_blocks,
name_to_index,
modify_targets,
}
}
/// Gets the next available index for AttrId.
pub fn next_free_attr_index(&self) -> usize {
self.code
.iter()
.map(|b| b.get_attr_id().as_usize())
.max()
.unwrap_or(0)
+ 1
}
/// Return the set of callees invoked by this function, including native functions
pub fn get_callees(&self) -> BTreeSet<QualifiedId<FunId>> {
use Bytecode::*;
use Operation::*;
let mut callees = BTreeSet::new();
for instr in &self.code {
if let Call(_, _, Function(mid, fid, _), _) = instr {
let callee = mid.qualified(*fid);
if !callees.contains(&callee) {
callees.insert(callee);
}
}
}
callees
}
/// Apply a variable renaming to this data, adjusting internal data structures.
pub fn rename_vars<F>(&mut self, f: &F)
where
F: Fn(TempIndex) -> TempIndex,
{
self.param_proxy_map = std::mem::take(&mut self.param_proxy_map)
.into_iter()
.map(|(x, y)| (f(x), f(y)))
.collect();
self.ref_param_proxy_map = std::mem::take(&mut self.ref_param_proxy_map)
.into_iter()
.map(|(x, y)| (f(x), f(y)))
.collect();
}
/// Create a clone of this function data, without code and annotations.
pub fn clone_without_code(&self) -> Self {
FunctionData {
code: vec![],
local_types: self.local_types.clone(),
return_types: self.return_types.clone(),
param_proxy_map: self.param_proxy_map.clone(),
ref_param_proxy_map: self.ref_param_proxy_map.clone(),
ref_param_return_map: self.ref_param_return_map.clone(),
acquires_global_resources: self.acquires_global_resources.clone(),
locations: self.locations.clone(),
annotations: Default::default(),
spec_blocks_on_impl: self.spec_blocks_on_impl.clone(),
name_to_index: self.name_to_index.clone(),
modify_targets: self.modify_targets.clone(),
}
}
}
// =================================================================================================
// Formatting
/// A function which is called to display the value of an annotation for a given function target
/// at the given code offset. The function is passed the function target and the code offset, and
/// is expected to pick the annotation of its respective type from the function target and for
/// the given code offset. It should return None if there is no relevant annotation.
pub type AnnotationFormatter = dyn Fn(&FunctionTarget<'_>, CodeOffset) -> Option<String>;
impl<'env> FunctionTarget<'env> {
/// Register a formatter. Each function target processor which introduces new annotations
/// should register a formatter in order to get is value printed when a function target
/// is displayed for debugging or testing.
pub fn register_annotation_formatter(&self, formatter: Box<AnnotationFormatter>) {
self.annotation_formatters.borrow_mut().push(formatter);
}
/// Tests use this function to register all relevant annotation formatters. Extend this with
/// new formatters relevant for tests.
pub fn register_annotation_formatters_for_test(&self) {
self.register_annotation_formatter(Box::new(livevar_analysis::format_livevar_annotation));
self.register_annotation_formatter(Box::new(borrow_analysis::format_borrow_annotation));
self.register_annotation_formatter(Box::new(
reaching_def_analysis::format_reaching_def_annotation,
));
}
}
impl<'env> fmt::Display for FunctionTarget<'env> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{}fun {}::{}",
if self.is_public() { "pub " } else { "" },
self.func_env
.module_env
.get_name()
.display(self.symbol_pool()),
self.get_name().display(self.symbol_pool())
)?;
let tparams = &self.get_type_parameters();
if !tparams.is_empty() {
write!(f, "<")?;
for (i, TypeParameter(name, _)) in tparams.iter().enumerate() {
if i > 0 {
write!(f, ", ")?;
}
write!(f, "{}", name.display(self.symbol_pool()))?;
}
write!(f, ">")?;
}
let tctx = TypeDisplayContext::WithEnv {
env: self.global_env(),
type_param_names: None,
};
write!(f, "(")?;
for i in 0..self.get_parameter_count() {
if i > 0 {
write!(f, ", ")?;
}
write!(
f,
"{}: {}",
self.get_local_name(i).display(self.symbol_pool()),
self.get_local_type(i).display(&tctx)
)?;
}
write!(f, ")")?;
if self.get_return_count() > 0 {
write!(f, ": ")?;
if self.get_return_count() > 1 {
write!(f, "(")?;
}
for i in 0..self.get_return_count() {
if i > 0 {
write!(f, ", ")?;
}
write!(f, "{}", self.get_return_type(i).display(&tctx))?;
}
if self.get_return_count() > 1 {
write!(f, ")")?;
}
}
writeln!(f, " {{")?;
for i in self.get_parameter_count()..self.get_local_count() {
writeln!(
f,
" var {}: {}",
self.get_local_name(i).display(self.symbol_pool()),
self.get_local_type(i).display(&tctx)
)?;
}
for (offset, code) in self.get_bytecode().iter().enumerate() {
let annotations = self
.annotation_formatters
.borrow()
.iter()
.filter_map(|f| f(self, offset as CodeOffset))
.map(|s| format!(" // {}", s.replace("\n", "\n // ")))
.join("\n");
if !annotations.is_empty() {
writeln!(f, "{}", annotations)?;
}
writeln!(f, "{:>3}: {}", offset, code.display(self))?;
}
writeln!(f, "}}")?;
Ok(())
}
} | pub fn get_modify_targets(&self) -> &BTreeMap<QualifiedId<StructId>, Vec<Exp>> {
&self.data.modify_targets
} |
config.go | // Copyright 2017 The Grafeas Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
// | // Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package config
import (
"errors"
"io/ioutil"
"log"
fernet "github.com/fernet/fernet-go"
"github.com/grafeas/grafeas/samples/server/go-server/api/server/api"
"github.com/grafeas/grafeas/samples/server/go-server/api/server/storage"
"gopkg.in/yaml.v2"
)
// File is the grafeas config file.
type file struct {
Grafeas *config `yaml:"grafeas"`
}
// Config is the global configuration for an instance of Grafeas.
type config struct {
API *api.Config `yaml:"api"`
StorageType string `yaml:"storage_type"` // Supported storage types are "memstore" and "postgres"
PgSQLConfig *storage.PgSQLConfig `yaml:"postgres"`
}
// DefaultConfig is a configuration that can be used as a fallback value.
func defaultConfig() *config {
return &config{
API: &api.Config{
Address: "0.0.0.0:8080",
CertFile: "",
KeyFile: "",
CAFile: "",
},
StorageType: "memstore",
PgSQLConfig: &storage.PgSQLConfig{},
}
}
// Creates a config from a YAML-file. If fileName is an empty
// string a default config will be returned.
func LoadConfig(fileName string) (*config, error) {
if fileName == "" {
return defaultConfig(), nil
}
data, err := ioutil.ReadFile(fileName)
if err != nil {
return nil, err
}
var configFile file
err = yaml.Unmarshal(data, &configFile)
if err != nil {
return nil, err
}
config := configFile.Grafeas
if config.StorageType == "postgres" {
// Generate a pagination key if none is provided.
if config.PgSQLConfig.PaginationKey == "" {
log.Println("pagination key is empty, generating...")
var key fernet.Key
if err = key.Generate(); err != nil {
return nil, err
}
config.PgSQLConfig.PaginationKey = key.Encode()
} else {
_, err = fernet.DecodeKey(config.PgSQLConfig.PaginationKey)
if err != nil {
err = errors.New("Invalid Pagination key; must be 32-bit URL-safe base64")
return nil, err
}
}
}
return config, nil
} | |
login.py | from flask_restful import abort, Resource
from flask import request, g, session
from flask.json import jsonify
from whistle_server.models.user import User
| class LoginEndpoint(Resource):
def post(self):
username = request.json.get('username')
password = request.json.get('password')
# wrong input
if username is None or password is None:
abort(418)
user = User.find_by_username(username)
# user doesn't exist
if user is None:
return abort(418)
# wrong password
if not verify_password(password, user.obj["password_hash"]):
return abort(418)
session["_session"] = str(user.obj['_id'])
response = jsonify({
"user_id": str(user.obj["_id"])
})
response.status_code = 201
return response
class CreateUserEndpoint(Resource):
def post(self):
username = request.json.get('username')
password = request.json.get('password')
# wrong input
if username is None or password is None:
print('username or password is None')
abort(418)
user = User.create(username, password)
if user is None:
print('User was None')
abort(418)
response = jsonify({})
response.status_code = 200
return response | def verify_password(password, hashed):
from werkzeug.security import check_password_hash
return check_password_hash(hashed, password)
|
ts_project.bzl | "ts_project rule"
load("@build_bazel_rules_nodejs//:providers.bzl", "DeclarationInfo", "ExternalNpmPackageInfo", "declaration_info", "js_module_info", "run_node")
load("@build_bazel_rules_nodejs//internal/linker:link_node_modules.bzl", "module_mappings_aspect")
load("@build_bazel_rules_nodejs//internal/node:node.bzl", "nodejs_binary")
load(":ts_config.bzl", "TsConfigInfo", "write_tsconfig")
_ValidOptionsInfo = provider()
_DEFAULT_TSC = (
# BEGIN-INTERNAL
"@npm" +
# END-INTERNAL
"//typescript/bin:tsc"
)
_DEFAULT_TYPESCRIPT_PACKAGE = (
# BEGIN-INTERNAL
"@npm" +
# END-INTERNAL
"//typescript"
)
_ATTRS = {
"args": attr.string_list(),
"data": attr.label_list(default = [], allow_files = True),
"declaration_dir": attr.string(),
"deps": attr.label_list(
providers = [
# Provide one or the other of these
[DeclarationInfo],
[_ValidOptionsInfo],
],
aspects = [module_mappings_aspect],
),
"extends": attr.label(allow_files = [".json"]),
"link_workspace_root": attr.bool(),
"out_dir": attr.string(), | # NB: no restriction on extensions here, because tsc sometimes adds type-check support
# for more file kinds (like require('some.json')) and also
# if you swap out the `compiler` attribute (like with ngtsc)
# that compiler might allow more sources than tsc does.
"srcs": attr.label_list(allow_files = True, mandatory = True),
"supports_workers": attr.bool(default = False),
"tsc": attr.label(default = Label(_DEFAULT_TSC), executable = True, cfg = "host"),
"tsconfig": attr.label(mandatory = True, allow_single_file = [".json"]),
}
# tsc knows how to produce the following kinds of output files.
# NB: the macro `ts_project_macro` will set these outputs based on user
# telling us which settings are enabled in the tsconfig for this project.
_OUTPUTS = {
"buildinfo_out": attr.output(),
"js_outs": attr.output_list(),
"map_outs": attr.output_list(),
"typing_maps_outs": attr.output_list(),
"typings_outs": attr.output_list(),
}
def _join(*elements):
segments = [f for f in elements if f]
if len(segments):
return "/".join(segments)
return "."
def _calculate_root_dir(ctx):
some_generated_path = None
some_source_path = None
root_path = None
# Note we don't have access to the ts_project macro allow_js param here.
# For error-handling purposes, we can assume that any .js/.jsx
# input is meant to be in the rootDir alongside .ts/.tsx sources,
# whether the user meant for them to be sources or not.
# It's a non-breaking change to relax this constraint later, but would be
# a breaking change to restrict it further.
allow_js = True
for src in ctx.files.srcs:
if _is_ts_src(src.path, allow_js):
if src.is_source:
some_source_path = src.path
else:
some_generated_path = src.path
root_path = ctx.bin_dir.path
if some_source_path and some_generated_path:
fail("ERROR: %s srcs cannot be a mix of generated files and source files " % ctx.label +
"since this would prevent giving a single rootDir to the TypeScript compiler\n" +
" found generated file %s and source file %s" %
(some_generated_path, some_source_path))
return _join(
root_path,
ctx.label.workspace_root,
ctx.label.package,
ctx.attr.root_dir,
)
def _ts_project_impl(ctx):
arguments = ctx.actions.args()
execution_requirements = {}
progress_prefix = "Compiling TypeScript project"
if ctx.attr.supports_workers:
# Set to use a multiline param-file for worker mode
arguments.use_param_file("@%s", use_always = True)
arguments.set_param_file_format("multiline")
execution_requirements["supports-workers"] = "1"
execution_requirements["worker-key-mnemonic"] = "TsProject"
progress_prefix = "Compiling TypeScript project (worker mode)"
# Add user specified arguments *before* rule supplied arguments
arguments.add_all(ctx.attr.args)
arguments.add_all([
"--project",
ctx.file.tsconfig.path,
"--outDir",
_join(ctx.bin_dir.path, ctx.label.workspace_root, ctx.label.package, ctx.attr.out_dir),
"--rootDir",
_calculate_root_dir(ctx),
])
if len(ctx.outputs.typings_outs) > 0:
declaration_dir = ctx.attr.declaration_dir if ctx.attr.declaration_dir else ctx.attr.out_dir
arguments.add_all([
"--declarationDir",
_join(ctx.bin_dir.path, ctx.label.workspace_root, ctx.label.package, declaration_dir),
])
# When users report problems, we can ask them to re-build with
# --define=VERBOSE_LOGS=1
# so anything that's useful to diagnose rule failures belongs here
if "VERBOSE_LOGS" in ctx.var.keys():
arguments.add_all([
# What files were in the ts.Program
"--listFiles",
# Did tsc write all outputs to the place we expect to find them?
"--listEmittedFiles",
# Why did module resolution fail?
"--traceResolution",
# Why was the build slow?
"--diagnostics",
"--extendedDiagnostics",
])
deps_depsets = []
inputs = ctx.files.srcs[:]
for dep in ctx.attr.deps:
if TsConfigInfo in dep:
deps_depsets.append(dep[TsConfigInfo].deps)
if ExternalNpmPackageInfo in dep:
# TODO: we could maybe filter these to be tsconfig.json or *.d.ts only
# we don't expect tsc wants to read any other files from npm packages.
deps_depsets.append(dep[ExternalNpmPackageInfo].sources)
if DeclarationInfo in dep:
deps_depsets.append(dep[DeclarationInfo].transitive_declarations)
if _ValidOptionsInfo in dep:
inputs.append(dep[_ValidOptionsInfo].marker)
inputs.extend(depset(transitive = deps_depsets).to_list())
# Gather TsConfig info from both the direct (tsconfig) and indirect (extends) attribute
tsconfig_inputs = _tsconfig_inputs(ctx)
inputs.extend(tsconfig_inputs)
# We do not try to predeclare json_outs, because their output locations generally conflict with their path in the source tree.
# (The exception is when out_dir is used, then the .json output is a different path than the input.)
# However tsc will copy .json srcs to the output tree so we want to declare these outputs to include along with .js Default outs
# NB: We don't have emit_declaration_only setting here, so use presence of any JS outputs as an equivalent.
# tsc will only produce .json if it also produces .js
if len(ctx.outputs.js_outs):
pkg_len = len(ctx.label.package) + 1 if len(ctx.label.package) else 0
json_outs = [
ctx.actions.declare_file(_join(ctx.attr.out_dir, src.short_path[pkg_len:]))
for src in ctx.files.srcs
if src.basename.endswith(".json") and src.is_source
]
else:
json_outs = []
outputs = json_outs + ctx.outputs.js_outs + ctx.outputs.map_outs + ctx.outputs.typings_outs + ctx.outputs.typing_maps_outs
if ctx.outputs.buildinfo_out:
arguments.add_all([
"--tsBuildInfoFile",
ctx.outputs.buildinfo_out.path,
])
outputs.append(ctx.outputs.buildinfo_out)
runtime_outputs = json_outs + ctx.outputs.js_outs + ctx.outputs.map_outs
typings_outputs = ctx.outputs.typings_outs + ctx.outputs.typing_maps_outs + [s for s in ctx.files.srcs if s.path.endswith(".d.ts")]
default_outputs_depset = depset(runtime_outputs) if len(runtime_outputs) else depset(typings_outputs)
if len(outputs) > 0:
run_node(
ctx,
inputs = inputs,
arguments = [arguments],
outputs = outputs,
mnemonic = "TsProject",
executable = "tsc",
execution_requirements = execution_requirements,
progress_message = "%s %s [tsc -p %s]" % (
progress_prefix,
ctx.label,
ctx.file.tsconfig.short_path,
),
link_workspace_root = ctx.attr.link_workspace_root,
)
providers = [
# DefaultInfo is what you see on the command-line for a built library,
# and determines what files are used by a simple non-provider-aware
# downstream library.
# Only the JavaScript outputs are intended for use in non-TS-aware
# dependents.
DefaultInfo(
files = default_outputs_depset,
runfiles = ctx.runfiles(
transitive_files = depset(ctx.files.data, transitive = [
default_outputs_depset,
]),
collect_default = True,
),
),
js_module_info(
sources = depset(runtime_outputs),
deps = ctx.attr.deps,
),
TsConfigInfo(deps = depset(tsconfig_inputs, transitive = [
dep[TsConfigInfo].deps
for dep in ctx.attr.deps
if TsConfigInfo in dep
])),
coverage_common.instrumented_files_info(
ctx,
source_attributes = ["srcs"],
dependency_attributes = ["deps"],
extensions = ["ts", "tsx"],
),
]
# Only provide DeclarationInfo if there are some typings.
# Improves error messaging if a ts_project needs declaration = True
typings_in_deps = [d for d in ctx.attr.deps if DeclarationInfo in d]
if len(typings_outputs) or len(typings_in_deps):
providers.append(declaration_info(depset(typings_outputs), typings_in_deps))
providers.append(OutputGroupInfo(types = depset(typings_outputs)))
return providers
def _tsconfig_inputs(ctx):
"""Returns all transitively referenced tsconfig files from "tsconfig" and "extends" attributes."""
inputs = []
if TsConfigInfo in ctx.attr.tsconfig:
inputs.extend(ctx.attr.tsconfig[TsConfigInfo].deps)
else:
inputs.append(ctx.file.tsconfig)
if hasattr(ctx.attr, "extends") and ctx.attr.extends:
if TsConfigInfo in ctx.attr.extends:
inputs.extend(ctx.attr.extends[TsConfigInfo].deps)
else:
inputs.extend(ctx.attr.extends.files.to_list())
return inputs
ts_project = rule(
implementation = _ts_project_impl,
attrs = dict(_ATTRS, **_OUTPUTS),
)
def _validate_options_impl(ctx):
# Bazel won't run our action unless its output is needed, so make a marker file
# We make it a .d.ts file so we can plumb it to the deps of the ts_project compile.
marker = ctx.actions.declare_file("%s.optionsvalid.d.ts" % ctx.label.name)
arguments = ctx.actions.args()
config = struct(
allow_js = ctx.attr.allow_js,
declaration = ctx.attr.declaration,
declaration_map = ctx.attr.declaration_map,
preserve_jsx = ctx.attr.preserve_jsx,
composite = ctx.attr.composite,
emit_declaration_only = ctx.attr.emit_declaration_only,
source_map = ctx.attr.source_map,
incremental = ctx.attr.incremental,
ts_build_info_file = ctx.attr.ts_build_info_file,
)
arguments.add_all([ctx.file.tsconfig.path, marker.path, ctx.attr.target, json.encode(config)])
inputs = _tsconfig_inputs(ctx)
run_node(
ctx,
inputs = inputs,
outputs = [marker],
arguments = [arguments],
executable = "validator",
)
return [
_ValidOptionsInfo(marker = marker),
]
validate_options = rule(
implementation = _validate_options_impl,
attrs = {
"allow_js": attr.bool(),
"composite": attr.bool(),
"declaration": attr.bool(),
"declaration_map": attr.bool(),
"emit_declaration_only": attr.bool(),
"extends": attr.label(allow_files = [".json"]),
"incremental": attr.bool(),
"preserve_jsx": attr.bool(),
"source_map": attr.bool(),
"target": attr.string(),
"ts_build_info_file": attr.string(),
"tsconfig": attr.label(mandatory = True, allow_single_file = [".json"]),
"validator": attr.label(default = Label("//packages/typescript/bin:ts_project_options_validator"), executable = True, cfg = "host"),
},
)
def _is_ts_src(src, allow_js):
if not src.endswith(".d.ts") and (src.endswith(".ts") or src.endswith(".tsx")):
return True
return allow_js and (src.endswith(".js") or src.endswith(".jsx"))
def _is_json_src(src):
return src.endswith(".json")
def _replace_ext(f, ext_map):
cur_ext = f[f.rindex("."):]
new_ext = ext_map.get(cur_ext)
if new_ext != None:
return new_ext
new_ext = ext_map.get("*")
if new_ext != None:
return new_ext
return None
def _out_paths(srcs, outdir, rootdir, allow_js, ext_map):
rootdir_replace_pattern = rootdir + "/" if rootdir else ""
return [
_join(outdir, f[:f.rindex(".")].replace(rootdir_replace_pattern, "") + _replace_ext(f, ext_map))
for f in srcs
if _is_ts_src(f, allow_js)
]
def ts_project_macro(
        name = "tsconfig",
        tsconfig = None,
        srcs = None,
        args = [],
        deps = [],
        extends = None,
        allow_js = False,
        declaration = False,
        source_map = False,
        declaration_map = False,
        preserve_jsx = False,
        composite = False,
        incremental = False,
        emit_declaration_only = False,
        ts_build_info_file = None,
        tsc = None,
        typescript_package = _DEFAULT_TYPESCRIPT_PACKAGE,
        typescript_require_path = "typescript",
        validate = True,
        supports_workers = False,
        declaration_dir = None,
        out_dir = None,
        root_dir = None,
        link_workspace_root = False,
        **kwargs):
    """Compiles one TypeScript project using `tsc --project`

    This is a drop-in replacement for the `tsc` rule automatically generated for the "typescript"
    package, typically loaded from `@npm//typescript:index.bzl`. Unlike bare `tsc`, this rule understands
    the Bazel interop mechanism (Providers) so that this rule works with others that produce or consume
    TypeScript typings (`.d.ts` files).

    Unlike `ts_library`, this rule is the thinnest possible layer of Bazel interoperability on top
    of the TypeScript compiler. It shifts the burden of configuring TypeScript into the tsconfig.json file.
    See https://github.com/bazelbuild/rules_nodejs/blob/master/docs/TypeScript.md#alternatives
    for more details about the trade-offs between the two rules.

    Some TypeScript options affect which files are emitted, and Bazel wants to know these ahead-of-time.
    So several options from the tsconfig file must be mirrored as attributes to ts_project.
    See https://www.typescriptlang.org/v2/en/tsconfig for a listing of the TypeScript options.

    Any code that works with `tsc` should work with `ts_project` with a few caveats:

    - Bazel requires that the `outDir` (and `declarationDir`) be set to
      `bazel-out/[target architecture]/bin/path/to/package`
      so we override whatever settings appear in your tsconfig.
    - Bazel expects that each output is produced by a single rule.
      Thus if you have two `ts_project` rules with overlapping sources (the same `.ts` file
      appears in more than one) then you get an error about conflicting `.js` output
      files if you try to build both together.
      Worse, if you build them separately then the output directory will contain whichever
      one you happened to build most recently. This is highly discouraged.

    > Note: in order for TypeScript to resolve relative references to the bazel-out folder,
    > we recommend that the base tsconfig contain a rootDirs section that includes all
    > possible locations they may appear.
    >
    > We hope this will not be needed in some future release of TypeScript.
    > Follow https://github.com/microsoft/TypeScript/issues/37257 for more info.
    >
    > For example, if the base tsconfig file relative to the workspace root is
    > `path/to/tsconfig.json` then you should configure like:
    >
    > ```
    > "compilerOptions": {
    >     "rootDirs": [
    >         ".",
    >         "../../bazel-out/host/bin/path/to",
    >         "../../bazel-out/darwin-fastbuild/bin/path/to",
    >         "../../bazel-out/k8-fastbuild/bin/path/to",
    >         "../../bazel-out/x64_windows-fastbuild/bin/path/to",
    >         "../../bazel-out/darwin-dbg/bin/path/to",
    >         "../../bazel-out/k8-dbg/bin/path/to",
    >         "../../bazel-out/x64_windows-dbg/bin/path/to",
    >     ]
    > }
    > ```
    >
    > See some related discussion including both "rootDirs" and "paths" for a monorepo setup
    > using custom import paths:
    > https://github.com/bazelbuild/rules_nodejs/issues/2298

    ### Issues when running non-sandboxed

    When using a non-sandboxed spawn strategy (which is the default on Windows), you may
    observe these problems which require workarounds:

    1) Bazel deletes outputs from the previous execution before running `tsc`.
       This causes a problem with TypeScript's incremental mode: if the `.tsbuildinfo` file
       is not known to be an output of the rule, then Bazel will leave it in the output
       directory, and when `tsc` runs, it may see that the outputs written by the prior
       invocation are up-to-date and skip the emit of these files. This will cause Bazel
       to intermittently fail with an error that some outputs were not written.
       This is why we depend on `composite` and/or `incremental` attributes to be provided,
       so we can tell Bazel to expect a `.tsbuildinfo` output to ensure it is deleted before a
       subsequent compilation.
       At present, we don't do anything useful with the `.tsbuildinfo` output, and this rule
       does not actually have incremental behavior. Deleting the file is actually
       counter-productive in terms of TypeScript compile performance.
       Follow https://github.com/bazelbuild/rules_nodejs/issues/1726

    2) When using Project References, TypeScript will expect to verify that the outputs of referenced
       projects are up-to-date with respect to their inputs.
       (This is true even without using the `--build` option).
       When using a non-sandboxed spawn strategy, `tsc` can read the sources from other `ts_project`
       rules in your project, and will expect that the `tsconfig.json` file for those references will
       indicate where the outputs were written. However the `outDir` is determined by this Bazel rule so
       it cannot be known from reading the `tsconfig.json` file.
       This problem is manifested as a TypeScript diagnostic like
       `error TS6305: Output file '/path/to/execroot/a.d.ts' has not been built from source file '/path/to/execroot/a.ts'.`
       As a workaround, you can give the Windows "fastbuild" output directory as the `outDir` in your tsconfig file.
       On other platforms, the value isn't read so it does no harm.
       See https://github.com/bazelbuild/rules_nodejs/tree/stable/packages/typescript/test/ts_project as an example.
       We hope this will be fixed in a future release of TypeScript;
       follow https://github.com/microsoft/TypeScript/issues/37378

    3) When TypeScript encounters an import statement, it adds the source file resolved by that reference
       to the program. However you may have included that source file in a different project, so this causes
       the problem mentioned above where a source file is in multiple programs.
       (Note, if you use Project References this is not the case, TS will know the referenced
       file is part of the other program.)
       This will result in duplicate emit for the same file, which produces an error
       since the files written to the output tree are read-only.
       Workarounds include using Project References, or simply grouping the whole compilation
       into one program (if this doesn't exceed your time budget).

    Args:
        name: A name for the target.

            We recommend you use the basename (no `.json` extension) of the tsconfig file that should be compiled.

        srcs: List of labels of TypeScript source files to be provided to the compiler.

            If absent, defaults to `**/*.ts[x]` (all TypeScript files in the package).

        deps: List of labels of other rules that produce TypeScript typings (.d.ts files)

        tsconfig: Label of the tsconfig.json file to use for the compilation

            To support "chaining" of more than one extended config, this label could be a target that
            provides `TsConfigInfo` such as `ts_config`.

            By default, we assume the tsconfig file is "tsconfig.json" in the same folder as the ts_project rule.

            EXPERIMENTAL: generated tsconfig

            Instead of a label, you can pass a dictionary of tsconfig keys.

            In this case, a tsconfig.json file will be generated for this compilation, in the following way:
            - all top-level keys will be copied by converting the dict to json.
              So `tsconfig = {"compilerOptions": {"declaration": True}}`
              will result in a generated `tsconfig.json` with `{"compilerOptions": {"declaration": true}}`
            - each file in srcs will be converted to a relative path in the `files` section.
            - the `extends` attribute will be converted to a relative path

            Note that you can mix and match attributes and compilerOptions properties, so these are equivalent:

            ```
            ts_project(
                tsconfig = {
                    "compilerOptions": {
                        "declaration": True,
                    },
                },
            )
            ```
            and
            ```
            ts_project(
                declaration = True,
            )
            ```

        extends: Label of the tsconfig file referenced in the `extends` section of tsconfig

            To support "chaining" of more than one extended config, this label could be a target that
            provides `TsConfigInfo` such as `ts_config`.

        args: List of strings of additional command-line arguments to pass to tsc.

        tsc: Label of the TypeScript compiler binary to run.

            For example, `tsc = "@my_deps//typescript/bin:tsc"`
            Or you can pass a custom compiler binary instead.

            One possible compiler is the Angular compiler, provided by the
            `@angular/compiler-cli` package as the `ngc` binary, which can be set typically with
            `tsc = "@npm//@angular/compiler-cli/bin:ngc"`
            Note that you'll also need to pass `.html` and `.css` files to the `srcs` of the `ts_project`
            so that they're declared as inputs for the Angular compiler to read them.

            An example can be found in the rules_nodejs repo under `packages/typescript/test/ts_project/ngc`.

            > To use the `ngc` program from Angular versions prior to 11, you'll need a fix for
            > https://github.com/angular/angular/issues/36290
            > To apply the fix, you can use the patch-package package to apply this patch:
            > https://gist.github.com/alexeagle/ba44b2601bd7c953d29c6e8ec44d1ef9

        typescript_package: Label of the package containing all data deps of tsc.

            For example, `typescript_package = "@my_deps//typescript"`

        typescript_require_path: Module name which resolves to typescript_package when required

            For example, `typescript_require_path = "typescript"`

        validate: boolean; whether to check that the tsconfig JSON settings match the attributes on this target.

            Set this to `False` to skip running our validator, in case you have a legitimate reason for these to differ,
            e.g. you have a setting enabled just for the editor but you want different behavior when Bazel runs `tsc`.

        supports_workers: Experimental! Use only with caution.

            Allows you to enable the Bazel Persistent Workers strategy for this project.
            See https://docs.bazel.build/versions/main/persistent-workers.html

            This requires that the tsc binary support a `--watch` option.

            NOTE: this does not work on Windows yet.
            We will silently fallback to non-worker mode on Windows regardless of the value of this attribute.
            Follow https://github.com/bazelbuild/rules_nodejs/issues/2277 for progress on this feature.

        root_dir: a string specifying a subdirectory under the input package which should be considered the
            root directory of all the input files.
            Equivalent to the TypeScript --rootDir option.
            By default it is '.', meaning the source directory where the BUILD file lives.

        out_dir: a string specifying a subdirectory under the bazel-out folder where outputs are written.
            Equivalent to the TypeScript --outDir option.
            Note that Bazel always requires outputs be written under a subdirectory matching the input package,
            so if your rule appears in path/to/my/package/BUILD.bazel and out_dir = "foo" then the .js files
            will appear in bazel-out/[arch]/bin/path/to/my/package/foo/*.js.
            By default the out_dir is '.', meaning the packages folder in bazel-out.

        allow_js: boolean; Specifies whether TypeScript will read .js and .jsx files. When used with declaration,
            TypeScript will generate .d.ts files from .js files.

        declaration_dir: a string specifying a subdirectory under the bazel-out folder where generated declaration
            outputs are written. Equivalent to the TypeScript --declarationDir option.
            By default declarations are written to the out_dir.

        declaration: if the `declaration` bit is set in the tsconfig.
            Instructs Bazel to expect a `.d.ts` output for each `.ts` source.
        source_map: if the `sourceMap` bit is set in the tsconfig.
            Instructs Bazel to expect a `.js.map` output for each `.ts` source.
        declaration_map: if the `declarationMap` bit is set in the tsconfig.
            Instructs Bazel to expect a `.d.ts.map` output for each `.ts` source.
        preserve_jsx: if the `jsx` value is set to "preserve" in the tsconfig.
            Instructs Bazel to expect a `.jsx` or `.jsx.map` output for each `.tsx` source.
        composite: if the `composite` bit is set in the tsconfig.
            Instructs Bazel to expect a `.tsbuildinfo` output and a `.d.ts` output for each `.ts` source.
        incremental: if the `incremental` bit is set in the tsconfig.
            Instructs Bazel to expect a `.tsbuildinfo` output.
        emit_declaration_only: if the `emitDeclarationOnly` bit is set in the tsconfig.
            Instructs Bazel *not* to expect `.js` or `.js.map` outputs for `.ts` sources.
        ts_build_info_file: the user-specified value of `tsBuildInfoFile` from the tsconfig.
            Helps Bazel to predict the path where the .tsbuildinfo output is written.

        link_workspace_root: Link the workspace root to the bin_dir to support absolute requires like 'my_wksp/path/to/file'.
            If source files need to be required then they can be copied to the bin_dir with copy_to_bin.

        **kwargs: passed through to underlying rule, allows eg. visibility, tags
    """

    # Default srcs to all TypeScript (and, with allow_js, JavaScript) files
    # in the package; outputs must be predictable, so a concrete list is needed.
    if srcs == None:
        if allow_js == True:
            srcs = native.glob(["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx"])
        else:
            srcs = native.glob(["**/*.ts", "**/*.tsx"])
    extra_deps = []

    if type(extends) == type([]):
        fail("As of rules_nodejs 3.0, extends should have a single value, not a list.\n" +
             "Use a ts_config rule to group together a chain of extended tsconfigs.")

    # Dict-form tsconfig: sync attributes with compilerOptions both ways,
    # then materialize the dict as a generated tsconfig_<name>.json file.
    if type(tsconfig) == type(dict()):
        # Copy attributes <-> tsconfig properties
        # TODO: fail if compilerOptions includes a conflict with an attribute?
        compiler_options = tsconfig.setdefault("compilerOptions", {})
        source_map = compiler_options.setdefault("sourceMap", source_map)
        declaration = compiler_options.setdefault("declaration", declaration)
        declaration_map = compiler_options.setdefault("declarationMap", declaration_map)
        emit_declaration_only = compiler_options.setdefault("emitDeclarationOnly", emit_declaration_only)
        allow_js = compiler_options.setdefault("allowJs", allow_js)

        # These options are always passed on the tsc command line so don't include them
        # in the tsconfig. At best they're redundant, but at worst we'll have a conflict
        if "outDir" in compiler_options.keys():
            out_dir = compiler_options.pop("outDir")
        if "declarationDir" in compiler_options.keys():
            declaration_dir = compiler_options.pop("declarationDir")
        if "rootDir" in compiler_options.keys():
            root_dir = compiler_options.pop("rootDir")

        # FIXME: need to remove keys that have a None value?
        write_tsconfig(
            name = "_gen_tsconfig_%s" % name,
            config = tsconfig,
            files = [s for s in srcs if _is_ts_src(s, allow_js) or _is_json_src(s)],
            extends = Label("%s//%s:%s" % (native.repository_name(), native.package_name(), name)).relative(extends) if extends else None,
            out = "tsconfig_%s.json" % name,
        )

        # From here, tsconfig becomes a file, the same as if the
        # user supplied a tsconfig.json InputArtifact
        tsconfig = "tsconfig_%s.json" % name
    else:
        if tsconfig == None:
            tsconfig = "tsconfig.json"

        # Only a user-supplied tsconfig file can disagree with the attributes;
        # run the validator rule and make the compilation depend on its marker.
        if validate:
            validate_options(
                name = "_validate_%s_options" % name,
                target = "//%s:%s" % (native.package_name(), name),
                declaration = declaration,
                source_map = source_map,
                declaration_map = declaration_map,
                preserve_jsx = preserve_jsx,
                composite = composite,
                incremental = incremental,
                ts_build_info_file = ts_build_info_file,
                emit_declaration_only = emit_declaration_only,
                allow_js = allow_js,
                tsconfig = tsconfig,
                extends = extends,
            )
            extra_deps.append("_validate_%s_options" % name)

    # Wrap tsc in a persistent-worker adapter binary when requested.
    if supports_workers:
        tsc_worker = "%s_worker" % name
        nodejs_binary(
            name = tsc_worker,
            data = [
                # BEGIN-INTERNAL
                # Users get this dependency transitively from @bazel/typescript
                # but that's our own code, so we don't.
                "@npm//protobufjs",
                # END-INTERNAL
                Label(typescript_package),
                Label("//packages/typescript/internal/worker:filegroup"),
                tsconfig,
            ],
            entry_point = Label("//packages/typescript/internal/worker:worker_adapter"),
            templated_args = [
                "--typescript_require_path",
                typescript_require_path,
            ],
        )
        tsc = ":" + tsc_worker

    # Predeclare every output tsc will write, based on the mirrored options.
    typings_out_dir = declaration_dir if declaration_dir else out_dir
    tsbuildinfo_path = ts_build_info_file if ts_build_info_file else name + ".tsbuildinfo"
    js_outs = []
    map_outs = []
    typings_outs = []
    typing_maps_outs = []

    if not emit_declaration_only:
        exts = {
            "*": ".js",
            ".jsx": ".jsx",
            ".tsx": ".jsx",
        } if preserve_jsx else {"*": ".js"}
        js_outs.extend(_out_paths(srcs, out_dir, root_dir, allow_js, exts))
    if source_map and not emit_declaration_only:
        exts = {
            "*": ".js.map",
            ".tsx": ".jsx.map",
        } if preserve_jsx else {"*": ".js.map"}
        map_outs.extend(_out_paths(srcs, out_dir, root_dir, False, exts))
    if declaration or composite:
        typings_outs.extend(_out_paths(srcs, typings_out_dir, root_dir, allow_js, {"*": ".d.ts"}))
    if declaration_map:
        typing_maps_outs.extend(_out_paths(srcs, typings_out_dir, root_dir, allow_js, {"*": ".d.ts.map"}))

    if not len(js_outs) and not len(typings_outs):
        fail("""ts_project target "//{}:{}" is configured to produce no outputs.

Note that ts_project must know the srcs in advance in order to predeclare the outputs.
Check the srcs attribute to see that some .ts files are present (or .js files with allow_js=True).
""".format(native.package_name(), name))

    ts_project(
        name = name,
        srcs = srcs,
        args = args,
        deps = deps + extra_deps,
        tsconfig = tsconfig,
        extends = extends,
        declaration_dir = declaration_dir,
        out_dir = out_dir,
        root_dir = root_dir,
        js_outs = js_outs,
        map_outs = map_outs,
        typings_outs = typings_outs,
        typing_maps_outs = typing_maps_outs,
        buildinfo_out = tsbuildinfo_path if composite or incremental else None,
        tsc = tsc,
        link_workspace_root = link_workspace_root,
        supports_workers = supports_workers,
        **kwargs
    )
kubeadmcontrolplanetemplate_webhook_test.go | /*
Copyright 2021 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1beta1
import (
"testing"
. "github.com/onsi/gomega"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
utilfeature "k8s.io/component-base/featuregate/testing"
"k8s.io/utils/pointer"
"sigs.k8s.io/cluster-api/feature"
)
// TestKubeadmControlPlaneTemplateValidationFeatureGateEnabled verifies that a
// well-formed KubeadmControlPlaneTemplate is accepted while the
// ClusterTopology feature gate is turned on for the duration of the test.
func TestKubeadmControlPlaneTemplateValidationFeatureGateEnabled(t *testing.T) {
	defer utilfeature.SetFeatureGateDuringTest(t, feature.Gates, feature.ClusterTopology, true)()

	t.Run("create kubeadmcontrolplanetemplate should pass if gate enabled and valid kubeadmcontrolplanetemplate", func(t *testing.T) {
		g := NewWithT(t)
		namespace := "test"

		// A valid template: "v"-prefixed semver version and a fully
		// populated infrastructure reference.
		template := &KubeadmControlPlaneTemplate{
			ObjectMeta: metav1.ObjectMeta{
				Name:      "kubeadmcontrolplanetemplate-test",
				Namespace: namespace,
			},
			Spec: KubeadmControlPlaneTemplateSpec{
				Template: KubeadmControlPlaneTemplateResource{
					Spec: KubeadmControlPlaneSpec{
						Replicas: pointer.Int32Ptr(3),
						Version:  "v1.20.2",
						MachineTemplate: KubeadmControlPlaneMachineTemplate{
							InfrastructureRef: corev1.ObjectReference{
								Name:       "machine-infra",
								Namespace:  namespace,
								Kind:       "TestMachineTemplate",
								APIVersion: "test/v1alpha4",
							},
						},
					},
				},
			},
		}

		g.Expect(template.ValidateCreate()).To(Succeed())
	})
}
func | (t *testing.T) {
// NOTE: ClusterTopology feature flag is disabled by default, thus preventing to create KubeadmControlPlaneTemplate.
t.Run("create kubeadmcontrolplanetemplate should not pass if gate disabled and valid kubeadmcontrolplanetemplate", func(t *testing.T) {
testnamespace := "test"
g := NewWithT(t)
kcpTemplate := &KubeadmControlPlaneTemplate{
ObjectMeta: metav1.ObjectMeta{
Name: "kubeadmcontrolplanetemplate-test",
Namespace: testnamespace,
},
Spec: KubeadmControlPlaneTemplateSpec{
Template: KubeadmControlPlaneTemplateResource{
Spec: KubeadmControlPlaneSpec{
Replicas: pointer.Int32Ptr(2),
Version: "1.20.2",
MachineTemplate: KubeadmControlPlaneMachineTemplate{
InfrastructureRef: corev1.ObjectReference{
Name: "machine-infra",
Namespace: testnamespace,
Kind: "TestMachineTemplate",
APIVersion: "test/v1alpha4",
},
},
},
},
},
}
g.Expect(kcpTemplate.ValidateCreate()).NotTo(Succeed())
})
}
| TestKubeadmControlPlaneTemplateValidationFeatureGateDisabled |
repository.py | from decimal import Decimal
from django.db.models import Sum
from django.shortcuts import get_object_or_404
from datetime import date, timedelta
from .models import Task
class TaskRepository:
    """Repository for tasks.

    Thin data-access layer over the ``Task`` model, so that views and
    services do not talk to the Django ORM directly.
    """

    def list(self):
        """Return the full queryset of tasks (no filtering or pagination)."""
        return Task.objects.all()

    def create(self, title: str, description: str, status: int):
        """Persist and return a new Task row with the given fields."""
        return Task.objects.create(
            title=title, description=description, status=status
        )

    def detail(self, id):
        """Return the task with primary key ``id``; raises Http404 if absent."""
        return get_object_or_404(Task, pk=id)

    def update(self, request, id):
        """Update only the status of task ``id`` from the request and return it.

        NOTE(review): assumes ``request.data`` exposes a 'status' key
        (DRF-style request object) — confirm against callers. A missing key
        silently sets status to None.
        """
        task = get_object_or_404(Task, pk=id)
        task.status = request.data.get('status')
        task.save()
        return task

    def destroy(self, pk=None):
        """Delete the task with primary key ``pk``.

        NOTE(review): unlike ``detail``/``update`` this uses ``objects.get``,
        so a missing row raises Task.DoesNotExist rather than Http404 —
        confirm whether that inconsistency is intentional.
        """
        task = Task.objects.get(id=pk)
        task.delete()
usersync.go | package sharethrough
import (
"text/template"
"github.com/prebid/prebid-server/adapters"
"github.com/prebid/prebid-server/usersync"
)
func | (temp *template.Template) usersync.Usersyncer {
return adapters.NewSyncer("sharethrough", temp, adapters.SyncTypeRedirect)
}
| NewSharethroughSyncer |
criteria.rs | //! This module defines the [`Criterion`] type, which contains all the boolean
//! properties Scryfall supports for searching cards. `Criterion` rarely
//! needs to be used directly, since its operations are also supported by the
//! inner types [`CardIs`] and [`PrintingIs`].
use std::fmt;
use crate::search::param::Param;
use crate::search::query::Query;
/// A search criterion for filtering cards. Each card is tagged with various
/// searchable properties, representing boolean parameters. Some of the criteria
/// are true for each printing of the card (see [`CardIs`]) and others are
/// specific to certain printings (see [`PrintingIs`]).
///
/// The `Criterion` type rarely needs to be used directly, since `CardIs` and
/// `PrintingIs` both implement `Into<`[`Query`]`>`.
///
/// # Examples
///
/// ```rust
/// # use scryfall::search::prelude::*;
/// # fn main() -> scryfall::Result<()> {
/// // Find a random card with Phyrexian mana symbols, available in watermarked foil.
/// let query = Query::And(vec![
///     CardIs::Phyrexian.into(),
///     PrintingIs::Watermark.into(),
///     PrintingIs::Foil.into(),
/// ]);
/// let card: scryfall::Card = query.random()?;
///
/// assert!(
///     card.mana_cost.unwrap().contains("/P")
///         || card.oracle_text.unwrap_or_default().contains("/P")
/// );
/// assert!(card.watermark.is_some());
/// assert!(card.foil);
/// # Ok(())
/// # }
/// ```
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
#[allow(missing_docs)]
pub enum Criterion {
    // A criterion that holds for every printing of a card.
    Card(CardIs),
    // A criterion specific to certain printings of a card.
    Printing(PrintingIs),
}
impl fmt::Display for Criterion {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Unwrap to the inner criterion and let it render itself.
        let inner: &dyn fmt::Display = match self {
            Criterion::Card(card) => card,
            Criterion::Printing(printing) => printing,
        };
        fmt::Display::fmt(inner, f)
    }
}
impl From<Criterion> for Query {
    fn from(criterion: Criterion) -> Self {
        // A criterion is encoded as a single search parameter.
        let param = Param::criterion(criterion);
        Self::Param(param)
    }
}
/// A search criterion applying to all printings of a card. These criteria
/// have to do with mana costs, abilities, and other properties of cards
/// that don't depend on a specific printing, such as
/// [`Modal`][self::CardIs::Modal], [`Vanilla`][self::CardIs::Vanilla], and
/// [`Reserved`][self::CardIs::Reserved].
///
/// `CardIs` also has a series of variants representing land cycles, including
/// [`FetchLand`][self::CardIs::FetchLand] and
/// [`ShockLand`][self::CardIs::ShockLand].
///
/// `CardIs` implements `Into<`[`Query`]`>`, so it can be used as an argument to
/// boolean methods such as [`not`][crate::search::query::not] and
/// [`Query::and()`].
///
/// # Example
///
/// ```rust
/// # use scryfall::search::prelude::*;
/// # fn main() -> scryfall::Result<()> {
/// let party_member = Query::from(CardIs::Party).and(CardIs::Leveler).random()?;
///
/// assert!(
///     party_member.type_line.contains("Cleric")
///         || party_member.type_line.contains("Rogue")
///         || party_member.type_line.contains("Warrior")
///         || party_member.type_line.contains("Wizard"),
/// );
/// assert!(party_member.keywords.iter().any(|kw| kw == "Level Up"));
/// # Ok(())
/// # }
/// ```
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
#[cfg_attr(test, derive(strum::EnumIter))]
pub enum CardIs {
    /// Find cards that have a color indicator.
    ColorIndicator,
    /// Find cards with an even converted mana cost (zero is even).
    EvenCmc,
    /// Find cards with an odd converted mana cost.
    OddCmc,
    /// Find cards that contain Phyrexian mana symbols in their cost or text.
    Phyrexian,
    /// Find cards with a cost containing hybrid mana symbols.
    Hybrid,
    /// Find split cards.
    Split,
    /// Find flip cards.
    Flip,
    /// Find transforming cards.
    Transform,
    /// Find modal dual-face cards.
    ModalDfc,
    /// Find cards with meld.
    Meld,
    /// Find leveler cards.
    Leveler,
    /// Find cards that are cast as spells.
    Spell,
    /// Find permanent cards.
    Permanent,
    /// Find historic cards.
    Historic,
    /// Find party cards.
    Party,
    /// Find cards with modal effects.
    Modal,
    /// Find vanilla creatures.
    Vanilla,
    /// Find french vanilla creatures (evergreen keywords only).
    FrenchVanilla,
    /// Find Un-cards, holiday cards, and other funny cards.
    Funny,
    /// Find cards that can be your commander.
    Commander,
    /// Find cards that can be your Brawl commander.
    Brawler,
    /// Find cards that can be your companion.
    Companion,
    /// Find cards on the reserved list.
    Reserved,

    // The remaining variants identify well-known land cycles.
    /// A cycling dual land, such as [Fetid Pools](https://scryfall.com/card/akh/243).
    BicycleLand,
    /// A cycling tri land, such as [Ketria Triome](https://scryfall.com/card/iko/250).
    #[doc(alias = "triome")]
    TricycleLand,
    /// A land that returns other lands to your hand, such as
    /// [Boros Garrison](https://scryfall.com/card/rav/275).
    BounceLand,
    /// A pain land that can be sacrificed to draw a card, such as
    /// [Horizon Canopy](https://scryfall.com/card/fut/177).
    CanopyLand,
    /// A land that enters tapped unless you control a basic of its color, such
    /// as [Glacial Fortress](https://scryfall.com/card/m10/226).
    CheckLand,
    /// An original dual land, such as [Tropical Island](https://scryfall.com/card/lea/283).
    DualLand,
    /// A land that enters tapped unless you control two or fewer other lands,
    /// such as [Blackcleave Cliffs](https://scryfall.com/card/som/224).
    FastLand,
    /// A fetch land, such as [Scalding Tarn](https://scryfall.com/card/zen/223).
    FetchLand,
    /// A land that filters mana into other colors, such as
    /// [Mystic Gate](https://scryfall.com/card/shm/277) or
    /// [Cascading Cataracts](https://scryfall.com/card/akh/240/cascading-cataracts).
    FilterLand,
    /// A land that enters tapped and gains 1 life, such as
    /// [Jungle Hollow](https://scryfall.com/card/ktk/235).
    GainLand,
    /// A land that costs life for colored mana, such as
    /// [Caves of Koilos](https://scryfall.com/card/apc/140).
    PainLand,
    /// A land that enters tapped and has "Scry 1", such as
    /// [Temple of Mystery](https://scryfall.com/card/ths/226).
    ScryLand,
    /// A land that enters tapped unless you reveal a basic from your hand, such
    /// as [Choked Estuary](https://scryfall.com/card/soi/270).
    ShadowLand,
    /// A land that enters tapped unless you pay 2 life, such as
    /// [Breeding Pool](https://scryfall.com/card/dis/172).
    ShockLand,
    /// A land that allows you to store up mana for later use, such as
    /// [Fungal Reaches](https://scryfall.com/card/tsp/273) or
    /// [Crucible of the Spirit Dragon](https://scryfall.com/card/frf/167).
    StorageLand,
    /// A land that turns into a creature, such as
    /// [Celestial Colonnade](https://scryfall.com/card/wwk/133),
    /// [Mutavault](https://scryfall.com/card/mor/148), or
    /// [Inkmoth Nexus](https://scryfall.com/card/mbs/145).
    #[doc(alias = "manland")]
    CreatureLand,
    /// A land that enters tapped and produces three colors, such as
    /// [Mystic Monastery](https://scryfall.com/card/ktk/236).
    TriLand,
    /// A land that enters tapped unless you control two basics in its
    /// colors, such as [Canopy Vista](https://scryfall.com/card/bfz/234).
    #[doc(alias = "tango")]
    BattleLand,
}
impl fmt::Display for CardIs {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}:{}",
match self {
CardIs::ColorIndicator => "has", // Synonym for 'is'.
CardIs::EvenCmc | CardIs::OddCmc => "cmc",
_ => "is",
},
match self {
CardIs::ColorIndicator => "indicator",
CardIs::EvenCmc => "even",
CardIs::OddCmc => "odd",
CardIs::Phyrexian => "phyrexian",
CardIs::Hybrid => "hybrid",
CardIs::Split => "split",
CardIs::Flip => "flip",
CardIs::Transform => "transform",
CardIs::ModalDfc => "modal_dfc",
CardIs::Meld => "meld",
CardIs::Leveler => "leveler",
CardIs::Spell => "spell",
CardIs::Permanent => "permanent",
CardIs::Historic => "historic",
CardIs::Party => "party",
CardIs::Modal => "modal",
CardIs::Vanilla => "vanilla",
CardIs::FrenchVanilla => "french_vanilla",
CardIs::Funny => "funny",
CardIs::Commander => "commander",
CardIs::Brawler => "brawler",
CardIs::Companion => "companion",
CardIs::Reserved => "reserved",
CardIs::BicycleLand => "bicycle_land",
CardIs::TricycleLand => "tricycle_land",
CardIs::BounceLand => "bounce_land",
CardIs::CanopyLand => "canopy_land",
CardIs::CheckLand => "check_land",
CardIs::DualLand => "dual",
CardIs::FastLand => "fast_land",
CardIs::FetchLand => "fetch_land",
CardIs::FilterLand => "filter_land",
CardIs::GainLand => "gain_land",
CardIs::PainLand => "pain_land",
CardIs::ScryLand => "scry_land",
CardIs::ShadowLand => "shadow_land",
CardIs::ShockLand => "shock_land",
CardIs::StorageLand => "storage_land",
CardIs::CreatureLand => "creature_land",
CardIs::TriLand => "tri_land",
CardIs::BattleLand => "battle_land",
}
)
}
}
impl From<CardIs> for Query {
fn from(card: CardIs) -> Self {
Criterion::Card(card).into()
}
}
/// A search criterion applying to a specific printing of a card. These criteria
/// have to do with art, frames, foil, and reprints, and other things
/// that are not true for every printing of a card, including
/// [`FirstPrint`][self::PrintingIs::FirstPrint],
/// [`Watermark`][self::PrintingIs::Watermark], and
/// [`NewArt`][self::PrintingIs::NewArt].
///
/// `PrintingIs` implements `Into<`[`Query`]`>`, so it can be used as an
/// argument to boolean methods such as [`not`][crate::search::query::not] and
/// [`Query::and()`].
///
/// # Example
/// ```rust
/// # use scryfall::search::prelude::*;
/// # fn main() -> scryfall::Result<()> {
/// // Find a card with new artwork by an artist that has illustrated the card before.
/// let artist_redo = not(PrintingIs::NewArtist)
///     .and(PrintingIs::NewArt)
///     .random()?;
///
/// // There should be at least 2 unique artworks of this card by this artist.
/// let all_versions = SearchOptions::new()
///     .query(
///         exact(artist_redo.name.as_str())
///             .and(artist(artist_redo.artist.as_ref().unwrap().as_str())),
///     )
///     .unique(UniqueStrategy::Art)
///     .search_all()?;
/// assert!(all_versions.len() >= 2);
/// # Ok(())
/// # }
/// ```
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
#[cfg_attr(test, derive(strum::EnumIter))]
pub enum PrintingIs {
    // The `New*` variants render with the `new:` keyword rather than `is:`.
    /// Find printings that are printed for the first time in paper.
    NewCard,
    /// Find printings using a new rarity (including the first print).
    NewRarity,
    /// Find printings with new artwork (including the first print).
    NewArt,
    /// Find printings with an artist who has not illustrated this card before
    /// (including the first print).
    NewArtist,
    /// Find printings with new flavor text (including the first print).
    NewFlavor,
    /// Find printings with a new frame (including the first print).
    NewFrame,
    /// Find printings available for the first time in a new language (including
    /// the first print).
    NewLanguage,
    /// Find printings that have a watermark.
    Watermark,
    /// Find printings with full art.
    Full,
    /// Find printings that are available in non-foil.
    Nonfoil,
    /// Find printings that are available in foil.
    Foil,
    /// Find printings in Scryfall's database with high-resolution images.
    HiRes,
    /// Find printings that are only available digitally (MTGO and Arena).
    Digital,
    /// Find promotional printings.
    Promo,
    /// Find printings that are Story Spotlights in their set.
    Spotlight,
    /// Find printings that are in the Masterpiece Series.
    Masterpiece,
    /// Find printings that have only been in a single set.
    Unique,
    /// Find first printings (digital or paper).
    FirstPrint,
    /// Find reprints.
    Reprint,
}
impl fmt::Display for PrintingIs {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}:{}",
match self {
PrintingIs::NewCard
| PrintingIs::NewRarity
| PrintingIs::NewArt
| PrintingIs::NewArtist
| PrintingIs::NewFlavor
| PrintingIs::NewFrame
| PrintingIs::NewLanguage => "new",
PrintingIs::Watermark => "has", // Synonym for `is`.
_ => "is",
},
match self {
PrintingIs::NewCard => "card",
PrintingIs::NewRarity => "rarity",
PrintingIs::NewArt => "art",
PrintingIs::NewArtist => "artist",
PrintingIs::NewFlavor => "flavor",
PrintingIs::NewFrame => "frame",
PrintingIs::NewLanguage => "language",
PrintingIs::Watermark => "watermark",
PrintingIs::Full => "full",
PrintingIs::Foil => "foil",
PrintingIs::Nonfoil => "nonfoil",
PrintingIs::HiRes => "hires",
PrintingIs::Digital => "digital",
PrintingIs::Promo => "promo",
PrintingIs::Spotlight => "spotlight",
PrintingIs::FirstPrint => "first_print",
PrintingIs::Reprint => "reprint",
PrintingIs::Masterpiece => "masterpiece",
PrintingIs::Unique => "unique",
}
)
}
}
impl From<PrintingIs> for Query {
fn from(printing: PrintingIs) -> Self {
Criterion::Printing(printing).into()
}
}
#[cfg(test)]
mod tests {
use strum::IntoEnumIterator;
use super::*;
use crate::search::Search;
#[test]
#[ignore]
fn all_card_is() {
for criterion in CardIs::iter() {
Query::from(criterion)
.random()
.unwrap_or_else(|_| panic!("Failed to get a card for {}", criterion));
}
}
#[test]
#[ignore]
fn | () {
for criterion in PrintingIs::iter() {
Query::from(criterion)
.random()
.unwrap_or_else(|_| panic!("Failed to get a printing for {}", criterion));
}
}
}
| all_printing_is |
player.go | // Copyright 2017 Hajime Hoshi
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package oto offers io.Writer to play sound on multiple platforms.
package oto
import (
"io"
"runtime"
"github.com/hajimehoshi/oto/internal/mux"
)
// Player is a PCM (pulse-code modulation) audio player.
// Player implements io.WriteCloser.
// Use Write method to play samples.
type Player struct {
context *Context
r mux.LenReader
w io.Writer
}
func | (context *Context) *Player {
r, w := pipe()
p := &Player{
context: context,
r: r,
w: w,
}
runtime.SetFinalizer(p, (*Player).Close)
return p
}
// Write writes PCM samples to the Player.
//
// The format is as follows:
// [data] = [sample 1] [sample 2] [sample 3] ...
// [sample *] = [channel 1] ...
// [channel *] = [byte 1] [byte 2] ...
// Byte ordering is little endian.
//
// The data is first put into the Player's buffer. Once the buffer is full, Player starts playing
// the data and empties the buffer.
//
// If the supplied data doesn't fit into the Player's buffer, Write block until a sufficient amount
// of data has been played (or at least started playing) and the remaining unplayed data fits into
// the buffer.
//
// Note, that the Player won't start playing anything until the buffer is full.
func (p *Player) Write(buf []byte) (int, error) {
return p.w.Write(buf)
}
// Close closes the Player and frees any resources associated with it. The Player is no longer
// usable after calling Close.
func (p *Player) Close() error {
runtime.SetFinalizer(p, nil)
// Already closed
if p.context == nil {
return nil
}
p.context.mux.RemoveSource(p.r)
p.context = nil
return nil
}
func max(a, b int) int {
if a < b {
return b
}
return a
}
func min(a, b int) int {
if a < b {
return a
}
return b
}
| newPlayer |
jit_spec.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import 'reflect-metadata';
import {InjectorDef, defineInjectable} from '@angular/core/src/di/defs';
import {Injectable} from '@angular/core/src/di/injectable';
import {inject, setCurrentInjector} from '@angular/core/src/di/injector';
import {ivyEnabled} from '@angular/core/src/ivy_switch';
import {Component, HostBinding, HostListener, Input, Output, Pipe} from '@angular/core/src/metadata/directives';
import {NgModule, NgModuleDef} from '@angular/core/src/metadata/ng_module';
import {ComponentDef, PipeDef} from '@angular/core/src/render3/interfaces/definition';
ivyEnabled && describe('render3 jit', () => {
let injector: any;
beforeAll(() => { injector = setCurrentInjector(null); });
afterAll(() => { setCurrentInjector(injector); });
it('compiles a component', () => {
@Component({
template: 'test',
selector: 'test-cmp',
})
class SomeCmp {
}
const SomeCmpAny = SomeCmp as any;
expect(SomeCmpAny.ngComponentDef).toBeDefined();
expect(SomeCmpAny.ngComponentDef.factory() instanceof SomeCmp).toBe(true);
});
it('compiles an injectable with a type provider', () => {
@Injectable({providedIn: 'root'})
class | {
}
const ServiceAny = Service as any;
expect(ServiceAny.ngInjectableDef).toBeDefined();
expect(ServiceAny.ngInjectableDef.providedIn).toBe('root');
expect(inject(Service) instanceof Service).toBe(true);
});
it('compiles an injectable with a useValue provider', () => {
@Injectable({providedIn: 'root', useValue: 'test'})
class Service {
}
expect(inject(Service)).toBe('test');
});
it('compiles an injectable with a useExisting provider', () => {
@Injectable({providedIn: 'root', useValue: 'test'})
class Existing {
}
@Injectable({providedIn: 'root', useExisting: Existing})
class Service {
}
expect(inject(Service)).toBe('test');
});
it('compiles an injectable with a useFactory provider, without deps', () => {
@Injectable({providedIn: 'root', useFactory: () => 'test'})
class Service {
}
expect(inject(Service)).toBe('test');
});
it('compiles an injectable with a useFactory provider, with deps', () => {
@Injectable({providedIn: 'root', useValue: 'test'})
class Existing {
}
@Injectable({providedIn: 'root', useFactory: (existing: any) => existing, deps: [Existing]})
class Service {
}
expect(inject(Service)).toBe('test');
});
it('compiles an injectable with a useClass provider, with deps', () => {
@Injectable({providedIn: 'root', useValue: 'test'})
class Existing {
}
class Other {
constructor(public value: any) {}
}
@Injectable({providedIn: 'root', useClass: Other, deps: [Existing]})
class Service {
get value(): any { return null; }
}
const ServiceAny = Service as any;
expect(inject(Service).value).toBe('test');
});
it('compiles an injectable with a useClass provider, without deps', () => {
let _value = 1;
@Injectable({providedIn: 'root'})
class Existing {
readonly value = _value++;
}
@Injectable({providedIn: 'root', useClass: Existing})
class Service {
get value(): number { return 0; }
}
expect(inject(Existing).value).toBe(1);
const injected = inject(Service);
expect(injected instanceof Existing).toBe(true);
expect(injected.value).toBe(2);
});
it('compiles an injectable with an inherited constructor', () => {
@Injectable({providedIn: 'root'})
class Dep {
}
@Injectable()
class Base {
constructor(readonly dep: Dep) {}
}
@Injectable({providedIn: 'root'})
class Child extends Base {
}
expect(inject(Child).dep instanceof Dep).toBe(true);
});
it('compiles a module to a definition', () => {
@Component({
template: 'foo',
selector: 'foo',
})
class Cmp {
}
@NgModule({
declarations: [Cmp],
})
class Module {
}
const moduleDef: NgModuleDef<Module> = (Module as any).ngModuleDef;
expect(moduleDef).toBeDefined();
expect(moduleDef.declarations.length).toBe(1);
expect(moduleDef.declarations[0]).toBe(Cmp);
});
it('compiles a module to an ngInjectorDef with the providers', () => {
class Token {
static ngInjectableDef = defineInjectable({
providedIn: 'root',
factory: () => 'default',
});
}
@NgModule({
providers: [{provide: Token, useValue: 'test'}],
})
class Module {
constructor(public token: Token) {}
}
const injectorDef: InjectorDef<Module> = (Module as any).ngInjectorDef;
const instance = injectorDef.factory();
// Since the instance was created outside of an injector using the module, the
// injection will use the default provider, not the provider from the module.
expect(instance.token).toBe('default');
expect(injectorDef.providers).toEqual([{provide: Token, useValue: 'test'}]);
});
it('patches a module onto the component', () => {
@Component({
template: 'foo',
selector: 'foo',
})
class Cmp {
}
const cmpDef: ComponentDef<Cmp> = (Cmp as any).ngComponentDef;
expect(cmpDef.directiveDefs).toBeNull();
@NgModule({
declarations: [Cmp],
})
class Module {
}
const moduleDef: NgModuleDef<Module> = (Module as any).ngModuleDef;
expect(cmpDef.directiveDefs instanceof Function).toBe(true);
expect((cmpDef.directiveDefs as Function)()).toEqual([cmpDef]);
});
it('should add hostbindings and hostlisteners', () => {
@Component({
template: 'foo',
selector: 'foo',
host: {
'[class.red]': 'isRed',
'(click)': 'onClick()',
},
})
class Cmp {
@HostBinding('class.green')
green: boolean = false;
@HostListener('change', ['$event'])
onChange(event: any): void {}
}
const cmpDef = (Cmp as any).ngComponentDef as ComponentDef<Cmp>;
expect(cmpDef.hostBindings).toBeDefined();
expect(cmpDef.hostBindings !.length).toBe(2);
});
it('should compile @Pipes without errors', () => {
@Pipe({name: 'test-pipe', pure: false})
class P {
}
const pipeDef = (P as any).ngPipeDef as PipeDef<P>;
expect(pipeDef.name).toBe('test-pipe');
expect(pipeDef.pure).toBe(false, 'pipe should not be pure');
expect(pipeDef.factory() instanceof P)
.toBe(true, 'factory() should create an instance of the pipe');
});
it('should default @Pipe to pure: true', () => {
@Pipe({name: 'test-pipe'})
class P {
}
const pipeDef = (P as any).ngPipeDef as PipeDef<P>;
expect(pipeDef.pure).toBe(true, 'pipe should be pure');
});
it('should add ngBaseDef to types with @Input properties', () => {
class C {
@Input('alias1')
prop1 = 'test';
@Input('alias2')
prop2 = 'test';
}
expect((C as any).ngBaseDef).toBeDefined();
expect((C as any).ngBaseDef.inputs).toEqual({prop1: 'alias1', prop2: 'alias2'});
});
it('should add ngBaseDef to types with @Output properties', () => {
class C {
@Output('alias1')
prop1 = 'test';
@Output('alias2')
prop2 = 'test';
}
expect((C as any).ngBaseDef).toBeDefined();
expect((C as any).ngBaseDef.outputs).toEqual({prop1: 'alias1', prop2: 'alias2'});
});
});
it('ensure at least one spec exists', () => {});
| Service |
wumr.rs | #[doc = "Register `WUMR` reader"]
pub struct R(crate::R<WUMR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<WUMR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<WUMR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<WUMR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `WUMR` writer"]
pub struct W(crate::W<WUMR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<WUMR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<WUMR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<WUMR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Supply Monitor Wake-up Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SMEN_A {
#[doc = "0: The supply monitor detection has no wake-up effect."]
NOT_ENABLE = 0,
#[doc = "1: The supply monitor detection forces the wake-up of the core power supply."]
ENABLE = 1,
}
impl From<SMEN_A> for bool {
#[inline(always)]
fn from(variant: SMEN_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SMEN` reader - Supply Monitor Wake-up Enable"]
pub struct SMEN_R(crate::FieldReader<bool, SMEN_A>);
impl SMEN_R {
pub(crate) fn new(bits: bool) -> Self {
SMEN_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SMEN_A {
match self.bits {
false => SMEN_A::NOT_ENABLE,
true => SMEN_A::ENABLE,
}
}
#[doc = "Checks if the value of the field is `NOT_ENABLE`"]
#[inline(always)]
pub fn is_not_enable(&self) -> bool {
**self == SMEN_A::NOT_ENABLE
}
#[doc = "Checks if the value of the field is `ENABLE`"]
#[inline(always)]
pub fn is_enable(&self) -> bool {
**self == SMEN_A::ENABLE
}
}
impl core::ops::Deref for SMEN_R {
type Target = crate::FieldReader<bool, SMEN_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SMEN` writer - Supply Monitor Wake-up Enable"]
pub struct SMEN_W<'a> {
w: &'a mut W,
}
impl<'a> SMEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SMEN_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "The supply monitor detection has no wake-up effect."]
#[inline(always)]
pub fn not_enable(self) -> &'a mut W {
self.variant(SMEN_A::NOT_ENABLE)
}
#[doc = "The supply monitor detection forces the wake-up of the core power supply."]
#[inline(always)]
pub fn enable(self) -> &'a mut W {
self.variant(SMEN_A::ENABLE)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1);
self.w
}
}
#[doc = "Real-time Timer Wake-up Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RTTEN_A {
#[doc = "0: The RTT alarm signal has no wake-up effect."]
NOT_ENABLE = 0,
#[doc = "1: The RTT alarm signal forces the wake-up of the core power supply."]
ENABLE = 1,
}
impl From<RTTEN_A> for bool {
#[inline(always)]
fn from(variant: RTTEN_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `RTTEN` reader - Real-time Timer Wake-up Enable"]
pub struct RTTEN_R(crate::FieldReader<bool, RTTEN_A>);
impl RTTEN_R {
pub(crate) fn new(bits: bool) -> Self {
RTTEN_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RTTEN_A {
match self.bits {
false => RTTEN_A::NOT_ENABLE,
true => RTTEN_A::ENABLE,
}
}
#[doc = "Checks if the value of the field is `NOT_ENABLE`"]
#[inline(always)]
pub fn is_not_enable(&self) -> bool {
**self == RTTEN_A::NOT_ENABLE
}
#[doc = "Checks if the value of the field is `ENABLE`"]
#[inline(always)]
pub fn is_enable(&self) -> bool {
**self == RTTEN_A::ENABLE
}
}
impl core::ops::Deref for RTTEN_R {
type Target = crate::FieldReader<bool, RTTEN_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `RTTEN` writer - Real-time Timer Wake-up Enable"]
pub struct RTTEN_W<'a> {
w: &'a mut W,
}
impl<'a> RTTEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RTTEN_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "The RTT alarm signal has no wake-up effect."]
#[inline(always)]
pub fn not_enable(self) -> &'a mut W {
self.variant(RTTEN_A::NOT_ENABLE)
}
#[doc = "The RTT alarm signal forces the wake-up of the core power supply."]
#[inline(always)]
pub fn enable(self) -> &'a mut W {
self.variant(RTTEN_A::ENABLE)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u32 & 0x01) << 2);
self.w
}
}
#[doc = "Real-time Clock Wake-up Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RTCEN_A {
#[doc = "0: The RTC alarm signal has no wake-up effect."]
NOT_ENABLE = 0,
#[doc = "1: The RTC alarm signal forces the wake-up of the core power supply."]
ENABLE = 1,
}
impl From<RTCEN_A> for bool {
#[inline(always)]
fn from(variant: RTCEN_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `RTCEN` reader - Real-time Clock Wake-up Enable"]
pub struct RTCEN_R(crate::FieldReader<bool, RTCEN_A>); | }
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RTCEN_A {
match self.bits {
false => RTCEN_A::NOT_ENABLE,
true => RTCEN_A::ENABLE,
}
}
#[doc = "Checks if the value of the field is `NOT_ENABLE`"]
#[inline(always)]
pub fn is_not_enable(&self) -> bool {
**self == RTCEN_A::NOT_ENABLE
}
#[doc = "Checks if the value of the field is `ENABLE`"]
#[inline(always)]
pub fn is_enable(&self) -> bool {
**self == RTCEN_A::ENABLE
}
}
impl core::ops::Deref for RTCEN_R {
type Target = crate::FieldReader<bool, RTCEN_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `RTCEN` writer - Real-time Clock Wake-up Enable"]
pub struct RTCEN_W<'a> {
w: &'a mut W,
}
impl<'a> RTCEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RTCEN_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "The RTC alarm signal has no wake-up effect."]
#[inline(always)]
pub fn not_enable(self) -> &'a mut W {
self.variant(RTCEN_A::NOT_ENABLE)
}
#[doc = "The RTC alarm signal forces the wake-up of the core power supply."]
#[inline(always)]
pub fn enable(self) -> &'a mut W {
self.variant(RTCEN_A::ENABLE)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u32 & 0x01) << 3);
self.w
}
}
#[doc = "Low-power Debouncer Enable WKUP0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LPDBCEN0_A {
#[doc = "0: The WKUP0 input pin is not connected to the low-power debouncer."]
NOT_ENABLE = 0,
#[doc = "1: The WKUP0 input pin is connected to the low-power debouncer and forces a system wake-up."]
ENABLE = 1,
}
impl From<LPDBCEN0_A> for bool {
#[inline(always)]
fn from(variant: LPDBCEN0_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LPDBCEN0` reader - Low-power Debouncer Enable WKUP0"]
pub struct LPDBCEN0_R(crate::FieldReader<bool, LPDBCEN0_A>);
impl LPDBCEN0_R {
pub(crate) fn new(bits: bool) -> Self {
LPDBCEN0_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LPDBCEN0_A {
match self.bits {
false => LPDBCEN0_A::NOT_ENABLE,
true => LPDBCEN0_A::ENABLE,
}
}
#[doc = "Checks if the value of the field is `NOT_ENABLE`"]
#[inline(always)]
pub fn is_not_enable(&self) -> bool {
**self == LPDBCEN0_A::NOT_ENABLE
}
#[doc = "Checks if the value of the field is `ENABLE`"]
#[inline(always)]
pub fn is_enable(&self) -> bool {
**self == LPDBCEN0_A::ENABLE
}
}
impl core::ops::Deref for LPDBCEN0_R {
type Target = crate::FieldReader<bool, LPDBCEN0_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `LPDBCEN0` writer - Low-power Debouncer Enable WKUP0"]
pub struct LPDBCEN0_W<'a> {
w: &'a mut W,
}
impl<'a> LPDBCEN0_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LPDBCEN0_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "The WKUP0 input pin is not connected to the low-power debouncer."]
#[inline(always)]
pub fn not_enable(self) -> &'a mut W {
self.variant(LPDBCEN0_A::NOT_ENABLE)
}
#[doc = "The WKUP0 input pin is connected to the low-power debouncer and forces a system wake-up."]
#[inline(always)]
pub fn enable(self) -> &'a mut W {
self.variant(LPDBCEN0_A::ENABLE)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u32 & 0x01) << 5);
self.w
}
}
#[doc = "Low-power Debouncer Enable WKUP1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LPDBCEN1_A {
#[doc = "0: The WKUP1 input pin is not connected to the low-power debouncer."]
NOT_ENABLE = 0,
#[doc = "1: The WKUP1 input pin is connected to the low-power debouncer and forces a system wake-up."]
ENABLE = 1,
}
impl From<LPDBCEN1_A> for bool {
#[inline(always)]
fn from(variant: LPDBCEN1_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LPDBCEN1` reader - Low-power Debouncer Enable WKUP1"]
pub struct LPDBCEN1_R(crate::FieldReader<bool, LPDBCEN1_A>);
impl LPDBCEN1_R {
pub(crate) fn new(bits: bool) -> Self {
LPDBCEN1_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LPDBCEN1_A {
match self.bits {
false => LPDBCEN1_A::NOT_ENABLE,
true => LPDBCEN1_A::ENABLE,
}
}
#[doc = "Checks if the value of the field is `NOT_ENABLE`"]
#[inline(always)]
pub fn is_not_enable(&self) -> bool {
**self == LPDBCEN1_A::NOT_ENABLE
}
#[doc = "Checks if the value of the field is `ENABLE`"]
#[inline(always)]
pub fn is_enable(&self) -> bool {
**self == LPDBCEN1_A::ENABLE
}
}
impl core::ops::Deref for LPDBCEN1_R {
type Target = crate::FieldReader<bool, LPDBCEN1_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `LPDBCEN1` writer - Low-power Debouncer Enable WKUP1"]
pub struct LPDBCEN1_W<'a> {
w: &'a mut W,
}
impl<'a> LPDBCEN1_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LPDBCEN1_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "The WKUP1 input pin is not connected to the low-power debouncer."]
#[inline(always)]
pub fn not_enable(self) -> &'a mut W {
self.variant(LPDBCEN1_A::NOT_ENABLE)
}
#[doc = "The WKUP1 input pin is connected to the low-power debouncer and forces a system wake-up."]
#[inline(always)]
pub fn enable(self) -> &'a mut W {
self.variant(LPDBCEN1_A::ENABLE)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | ((value as u32 & 0x01) << 6);
self.w
}
}
#[doc = "Low-power Debouncer Clear\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LPDBCCLR_A {
#[doc = "0: A low-power debounce event does not create an immediate clear on the first half of GPBR registers."]
NOT_ENABLE = 0,
#[doc = "1: A low-power debounce event on WKUP0 or WKUP1 generates an immediate clear on the first half of GPBR registers."]
ENABLE = 1,
}
impl From<LPDBCCLR_A> for bool {
#[inline(always)]
fn from(variant: LPDBCCLR_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LPDBCCLR` reader - Low-power Debouncer Clear"]
pub struct LPDBCCLR_R(crate::FieldReader<bool, LPDBCCLR_A>);
impl LPDBCCLR_R {
pub(crate) fn new(bits: bool) -> Self {
LPDBCCLR_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LPDBCCLR_A {
match self.bits {
false => LPDBCCLR_A::NOT_ENABLE,
true => LPDBCCLR_A::ENABLE,
}
}
#[doc = "Checks if the value of the field is `NOT_ENABLE`"]
#[inline(always)]
pub fn is_not_enable(&self) -> bool {
**self == LPDBCCLR_A::NOT_ENABLE
}
#[doc = "Checks if the value of the field is `ENABLE`"]
#[inline(always)]
pub fn is_enable(&self) -> bool {
**self == LPDBCCLR_A::ENABLE
}
}
impl core::ops::Deref for LPDBCCLR_R {
type Target = crate::FieldReader<bool, LPDBCCLR_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `LPDBCCLR` writer - Low-power Debouncer Clear"]
pub struct LPDBCCLR_W<'a> {
w: &'a mut W,
}
impl<'a> LPDBCCLR_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LPDBCCLR_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "A low-power debounce event does not create an immediate clear on the first half of GPBR registers."]
#[inline(always)]
pub fn not_enable(self) -> &'a mut W {
self.variant(LPDBCCLR_A::NOT_ENABLE)
}
#[doc = "A low-power debounce event on WKUP0 or WKUP1 generates an immediate clear on the first half of GPBR registers."]
#[inline(always)]
pub fn enable(self) -> &'a mut W {
self.variant(LPDBCCLR_A::ENABLE)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u32 & 0x01) << 7);
self.w
}
}
#[doc = "Wake-up Inputs Debouncer Period\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum WKUPDBC_A {
#[doc = "0: Immediate, no debouncing, detected active at least on one Slow Clock edge."]
IMMEDIATE = 0,
#[doc = "1: WKUPx shall be in its active state for at least 3 SLCK periods"]
_3_SLCK = 1,
#[doc = "2: WKUPx shall be in its active state for at least 32 SLCK periods"]
_32_SLCK = 2,
#[doc = "3: WKUPx shall be in its active state for at least 512 SLCK periods"]
_512_SLCK = 3,
#[doc = "4: WKUPx shall be in its active state for at least 4,096 SLCK periods"]
_4096_SLCK = 4,
#[doc = "5: WKUPx shall be in its active state for at least 32,768 SLCK periods"]
_32768_SLCK = 5,
}
impl From<WKUPDBC_A> for u8 {
#[inline(always)]
fn from(variant: WKUPDBC_A) -> Self {
variant as _
}
}
#[doc = "Field `WKUPDBC` reader - Wake-up Inputs Debouncer Period"]
pub struct WKUPDBC_R(crate::FieldReader<u8, WKUPDBC_A>);
impl WKUPDBC_R {
pub(crate) fn new(bits: u8) -> Self {
WKUPDBC_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<WKUPDBC_A> {
match self.bits {
0 => Some(WKUPDBC_A::IMMEDIATE),
1 => Some(WKUPDBC_A::_3_SLCK),
2 => Some(WKUPDBC_A::_32_SLCK),
3 => Some(WKUPDBC_A::_512_SLCK),
4 => Some(WKUPDBC_A::_4096_SLCK),
5 => Some(WKUPDBC_A::_32768_SLCK),
_ => None,
}
}
#[doc = "Checks if the value of the field is `IMMEDIATE`"]
#[inline(always)]
pub fn is_immediate(&self) -> bool {
**self == WKUPDBC_A::IMMEDIATE
}
#[doc = "Checks if the value of the field is `_3_SLCK`"]
#[inline(always)]
pub fn is_3_slck(&self) -> bool {
**self == WKUPDBC_A::_3_SLCK
}
#[doc = "Checks if the value of the field is `_32_SLCK`"]
#[inline(always)]
pub fn is_32_slck(&self) -> bool {
**self == WKUPDBC_A::_32_SLCK
}
#[doc = "Checks if the value of the field is `_512_SLCK`"]
#[inline(always)]
pub fn is_512_slck(&self) -> bool {
**self == WKUPDBC_A::_512_SLCK
}
#[doc = "Checks if the value of the field is `_4096_SLCK`"]
#[inline(always)]
pub fn is_4096_slck(&self) -> bool {
**self == WKUPDBC_A::_4096_SLCK
}
#[doc = "Checks if the value of the field is `_32768_SLCK`"]
#[inline(always)]
pub fn is_32768_slck(&self) -> bool {
**self == WKUPDBC_A::_32768_SLCK
}
}
impl core::ops::Deref for WKUPDBC_R {
type Target = crate::FieldReader<u8, WKUPDBC_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `WKUPDBC` writer - Wake-up Inputs Debouncer Period"]
pub struct WKUPDBC_W<'a> {
w: &'a mut W,
}
impl<'a> WKUPDBC_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: WKUPDBC_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "Immediate, no debouncing, detected active at least on one Slow Clock edge."]
#[inline(always)]
pub fn immediate(self) -> &'a mut W {
self.variant(WKUPDBC_A::IMMEDIATE)
}
#[doc = "WKUPx shall be in its active state for at least 3 SLCK periods"]
#[inline(always)]
pub fn _3_slck(self) -> &'a mut W {
self.variant(WKUPDBC_A::_3_SLCK)
}
#[doc = "WKUPx shall be in its active state for at least 32 SLCK periods"]
#[inline(always)]
pub fn _32_slck(self) -> &'a mut W {
self.variant(WKUPDBC_A::_32_SLCK)
}
#[doc = "WKUPx shall be in its active state for at least 512 SLCK periods"]
#[inline(always)]
pub fn _512_slck(self) -> &'a mut W {
self.variant(WKUPDBC_A::_512_SLCK)
}
#[doc = "WKUPx shall be in its active state for at least 4,096 SLCK periods"]
#[inline(always)]
pub fn _4096_slck(self) -> &'a mut W {
self.variant(WKUPDBC_A::_4096_SLCK)
}
#[doc = "WKUPx shall be in its active state for at least 32,768 SLCK periods"]
#[inline(always)]
pub fn _32768_slck(self) -> &'a mut W {
self.variant(WKUPDBC_A::_32768_SLCK)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x07 << 12)) | ((value as u32 & 0x07) << 12);
self.w
}
}
#[doc = "Low-power Debouncer Period\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum LPDBC_A {
#[doc = "0: Disable the low-power debouncers."]
DISABLE = 0,
#[doc = "1: WKUP0/1 in active state for at least 2 RTCOUTx clock periods"]
_2_RTCOUT = 1,
#[doc = "2: WKUP0/1 in active state for at least 3 RTCOUTx clock periods"]
_3_RTCOUT = 2,
#[doc = "3: WKUP0/1 in active state for at least 4 RTCOUTx clock periods"]
_4_RTCOUT = 3,
#[doc = "4: WKUP0/1 in active state for at least 5 RTCOUTx clock periods"]
_5_RTCOUT = 4,
#[doc = "5: WKUP0/1 in active state for at least 6 RTCOUTx clock periods"]
_6_RTCOUT = 5,
#[doc = "6: WKUP0/1 in active state for at least 7 RTCOUTx clock periods"]
_7_RTCOUT = 6,
#[doc = "7: WKUP0/1 in active state for at least 8 RTCOUTx clock periods"]
_8_RTCOUT = 7,
}
impl From<LPDBC_A> for u8 {
#[inline(always)]
fn from(variant: LPDBC_A) -> Self {
variant as _
}
}
#[doc = "Field `LPDBC` reader - Low-power Debouncer Period"]
pub struct LPDBC_R(crate::FieldReader<u8, LPDBC_A>);
impl LPDBC_R {
pub(crate) fn new(bits: u8) -> Self {
LPDBC_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LPDBC_A {
match self.bits {
0 => LPDBC_A::DISABLE,
1 => LPDBC_A::_2_RTCOUT,
2 => LPDBC_A::_3_RTCOUT,
3 => LPDBC_A::_4_RTCOUT,
4 => LPDBC_A::_5_RTCOUT,
5 => LPDBC_A::_6_RTCOUT,
6 => LPDBC_A::_7_RTCOUT,
7 => LPDBC_A::_8_RTCOUT,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `DISABLE`"]
#[inline(always)]
pub fn is_disable(&self) -> bool {
**self == LPDBC_A::DISABLE
}
#[doc = "Checks if the value of the field is `_2_RTCOUT`"]
#[inline(always)]
pub fn is_2_rtcout(&self) -> bool {
**self == LPDBC_A::_2_RTCOUT
}
#[doc = "Checks if the value of the field is `_3_RTCOUT`"]
#[inline(always)]
pub fn is_3_rtcout(&self) -> bool {
**self == LPDBC_A::_3_RTCOUT
}
#[doc = "Checks if the value of the field is `_4_RTCOUT`"]
#[inline(always)]
pub fn is_4_rtcout(&self) -> bool {
**self == LPDBC_A::_4_RTCOUT
}
#[doc = "Checks if the value of the field is `_5_RTCOUT`"]
#[inline(always)]
pub fn is_5_rtcout(&self) -> bool {
**self == LPDBC_A::_5_RTCOUT
}
#[doc = "Checks if the value of the field is `_6_RTCOUT`"]
#[inline(always)]
pub fn is_6_rtcout(&self) -> bool {
**self == LPDBC_A::_6_RTCOUT
}
#[doc = "Checks if the value of the field is `_7_RTCOUT`"]
#[inline(always)]
pub fn is_7_rtcout(&self) -> bool {
**self == LPDBC_A::_7_RTCOUT
}
#[doc = "Checks if the value of the field is `_8_RTCOUT`"]
#[inline(always)]
pub fn is_8_rtcout(&self) -> bool {
**self == LPDBC_A::_8_RTCOUT
}
}
// Transparent access to the wrapped FieldReader (bits(), comparisons, ...).
impl core::ops::Deref for LPDBC_R {
    type Target = crate::FieldReader<u8, LPDBC_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `LPDBC` writer - Low-power Debouncer Period"]
// Write proxy handed out by `W::lpdbc()`; holds the register writer.
pub struct LPDBC_W<'a> {
    w: &'a mut W,
}
impl<'a> LPDBC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LPDBC_A) -> &'a mut W {
        // Convert the enum to its raw value and splice it into the register.
        self.bits(u8::from(variant))
    }
    #[doc = "Disable the low-power debouncers."]
    #[inline(always)]
    pub fn disable(self) -> &'a mut W {
        self.variant(LPDBC_A::DISABLE)
    }
    #[doc = "WKUP0/1 in active state for at least 2 RTCOUTx clock periods"]
    #[inline(always)]
    pub fn _2_rtcout(self) -> &'a mut W {
        self.variant(LPDBC_A::_2_RTCOUT)
    }
    #[doc = "WKUP0/1 in active state for at least 3 RTCOUTx clock periods"]
    #[inline(always)]
    pub fn _3_rtcout(self) -> &'a mut W {
        self.variant(LPDBC_A::_3_RTCOUT)
    }
    #[doc = "WKUP0/1 in active state for at least 4 RTCOUTx clock periods"]
    #[inline(always)]
    pub fn _4_rtcout(self) -> &'a mut W {
        self.variant(LPDBC_A::_4_RTCOUT)
    }
    #[doc = "WKUP0/1 in active state for at least 5 RTCOUTx clock periods"]
    #[inline(always)]
    pub fn _5_rtcout(self) -> &'a mut W {
        self.variant(LPDBC_A::_5_RTCOUT)
    }
    #[doc = "WKUP0/1 in active state for at least 6 RTCOUTx clock periods"]
    #[inline(always)]
    pub fn _6_rtcout(self) -> &'a mut W {
        self.variant(LPDBC_A::_6_RTCOUT)
    }
    #[doc = "WKUP0/1 in active state for at least 7 RTCOUTx clock periods"]
    #[inline(always)]
    pub fn _7_rtcout(self) -> &'a mut W {
        self.variant(LPDBC_A::_7_RTCOUT)
    }
    #[doc = "WKUP0/1 in active state for at least 8 RTCOUTx clock periods"]
    #[inline(always)]
    pub fn _8_rtcout(self) -> &'a mut W {
        self.variant(LPDBC_A::_8_RTCOUT)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // LPDBC occupies bits 18:16 of WUMR: clear the field first, then
        // OR in the (masked) new value.
        const OFFSET: u32 = 16;
        const MASK: u32 = 0x07;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | ((u32::from(value) & MASK) << OFFSET);
        self.w
    }
}
impl R {
    #[doc = "Bit 1 - Supply Monitor Wake-up Enable"]
    #[inline(always)]
    pub fn smen(&self) -> SMEN_R {
        // Single-bit fields: test the bit in place with a mask.
        SMEN_R::new((self.bits & (1 << 1)) != 0)
    }
    #[doc = "Bit 2 - Real-time Timer Wake-up Enable"]
    #[inline(always)]
    pub fn rtten(&self) -> RTTEN_R {
        RTTEN_R::new((self.bits & (1 << 2)) != 0)
    }
    #[doc = "Bit 3 - Real-time Clock Wake-up Enable"]
    #[inline(always)]
    pub fn rtcen(&self) -> RTCEN_R {
        RTCEN_R::new((self.bits & (1 << 3)) != 0)
    }
    #[doc = "Bit 5 - Low-power Debouncer Enable WKUP0"]
    #[inline(always)]
    pub fn lpdbcen0(&self) -> LPDBCEN0_R {
        LPDBCEN0_R::new((self.bits & (1 << 5)) != 0)
    }
    #[doc = "Bit 6 - Low-power Debouncer Enable WKUP1"]
    #[inline(always)]
    pub fn lpdbcen1(&self) -> LPDBCEN1_R {
        LPDBCEN1_R::new((self.bits & (1 << 6)) != 0)
    }
    #[doc = "Bit 7 - Low-power Debouncer Clear"]
    #[inline(always)]
    pub fn lpdbcclr(&self) -> LPDBCCLR_R {
        LPDBCCLR_R::new((self.bits & (1 << 7)) != 0)
    }
    #[doc = "Bits 12:14 - Wake-up Inputs Debouncer Period"]
    #[inline(always)]
    pub fn wkupdbc(&self) -> WKUPDBC_R {
        // Multi-bit fields: shift the field down, then mask to its width.
        WKUPDBC_R::new(((self.bits >> 12) & 7) as u8)
    }
    #[doc = "Bits 16:18 - Low-power Debouncer Period"]
    #[inline(always)]
    pub fn lpdbc(&self) -> LPDBC_R {
        LPDBC_R::new(((self.bits >> 16) & 7) as u8)
    }
}
// Writer methods: each returns a field write proxy borrowing this writer,
// so calls can be chained inside `write`/`modify` closures.
impl W {
    #[doc = "Bit 1 - Supply Monitor Wake-up Enable"]
    #[inline(always)]
    pub fn smen(&mut self) -> SMEN_W {
        SMEN_W { w: self }
    }
    #[doc = "Bit 2 - Real-time Timer Wake-up Enable"]
    #[inline(always)]
    pub fn rtten(&mut self) -> RTTEN_W {
        RTTEN_W { w: self }
    }
    #[doc = "Bit 3 - Real-time Clock Wake-up Enable"]
    #[inline(always)]
    pub fn rtcen(&mut self) -> RTCEN_W {
        RTCEN_W { w: self }
    }
    #[doc = "Bit 5 - Low-power Debouncer Enable WKUP0"]
    #[inline(always)]
    pub fn lpdbcen0(&mut self) -> LPDBCEN0_W {
        LPDBCEN0_W { w: self }
    }
    #[doc = "Bit 6 - Low-power Debouncer Enable WKUP1"]
    #[inline(always)]
    pub fn lpdbcen1(&mut self) -> LPDBCEN1_W {
        LPDBCEN1_W { w: self }
    }
    #[doc = "Bit 7 - Low-power Debouncer Clear"]
    #[inline(always)]
    pub fn lpdbcclr(&mut self) -> LPDBCCLR_W {
        LPDBCCLR_W { w: self }
    }
    #[doc = "Bits 12:14 - Wake-up Inputs Debouncer Period"]
    #[inline(always)]
    pub fn wkupdbc(&mut self) -> WKUPDBC_W {
        WKUPDBC_W { w: self }
    }
    #[doc = "Bits 16:18 - Low-power Debouncer Period"]
    #[inline(always)]
    pub fn lpdbc(&mut self) -> LPDBC_W {
        LPDBC_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: arbitrary bit patterns may violate hardware invariants the
    // typed field writers above would otherwise enforce.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "Supply Controller Wake-up Mode Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [wumr](index.html) module"]
// Zero-sized marker type identifying the WUMR register.
pub struct WUMR_SPEC;
// Ties the register to its 32-bit underlying storage type.
impl crate::RegisterSpec for WUMR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [wumr::R](R) reader structure"]
impl crate::Readable for WUMR_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [wumr::W](W) writer structure"]
impl crate::Writable for WUMR_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets WUMR to value 0"]
impl crate::Resettable for WUMR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
} | impl RTCEN_R {
pub(crate) fn new(bits: bool) -> Self {
RTCEN_R(crate::FieldReader::new(bits)) |
wasm_testsuite.rs | extern crate cranelift_codegen;
extern crate cranelift_wasm;
#[macro_use]
extern crate target_lexicon;
extern crate wabt;
use cranelift_codegen::isa;
use cranelift_codegen::print_errors::pretty_verifier_error;
use cranelift_codegen::settings::{self, Configurable, Flags};
use cranelift_codegen::verifier;
use cranelift_wasm::{translate_module, DummyEnvironment};
use std::fs;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::Path;
use std::str::FromStr;
use wabt::wat2wasm;
#[test]
fn testsuite() {
    // Collect every entry of the shared wasm test corpus, skipping editor
    // temporaries and other hidden files (names starting with `.`).
    let mut entries: Vec<_> = fs::read_dir("../../wasmtests")
        .unwrap()
        .map(|entry| entry.unwrap())
        .filter(|entry| {
            entry
                .path()
                .file_stem()
                .and_then(|stem| stem.to_str())
                .map_or(false, |stem| !stem.starts_with('.'))
        }).collect();
    // Sort for a deterministic run order.
    entries.sort_by_key(|entry| entry.path());
    let flags = Flags::new(settings::builder());
    for entry in entries {
        handle_module(&entry.path(), &flags);
    }
}
#[test]
fn return_at_end() {
    // Translate one module with the `return_at_end` flag switched on.
    let mut builder = settings::builder();
    builder.enable("return_at_end").unwrap();
    handle_module(
        Path::new("../../wasmtests/return_at_end.wat"),
        &Flags::new(builder),
    );
}
fn read_file(path: &Path) -> io::Result<Vec<u8>> |
/// Translates one `.wasm`/`.wat` module to Cranelift IR and verifies every
/// translated function against the RISC-V 64 ISA.
///
/// Panics on unreadable files, unknown extensions, wat-conversion failures,
/// and verifier errors — this is test-harness code.
fn handle_module(path: &Path, flags: &Flags) {
    // Load the module bytes, converting `.wat` text to binary wasm as needed.
    let data = match path.extension().and_then(|ext| ext.to_str()) {
        Some("wasm") => read_file(path).expect("error reading wasm file"),
        Some("wat") => {
            let wat = read_file(path).expect("error reading wat file");
            match wat2wasm(&wat) {
                Ok(wasm) => wasm,
                Err(e) => panic!("error converting wat to wasm: {:?}", e),
            }
        }
        // Missing, non-UTF-8, or unknown extension: report the offending
        // path (the old missing-extension arm dropped it from the message).
        _ => panic!("the file extension for {:?} is not wasm or wat", path),
    };
    let mut dummy_environ = DummyEnvironment::with_triple_flags(triple!("riscv64"), flags.clone());
    translate_module(&data, &mut dummy_environ).unwrap();
    let isa = isa::lookup(dummy_environ.info.triple)
        .unwrap()
        .finish(dummy_environ.info.flags);
    for func in dummy_environ.info.function_bodies.values() {
        verifier::verify_function(func, &*isa)
            .map_err(|errors| panic!(pretty_verifier_error(func, Some(&*isa), None, errors)))
            .unwrap();
    }
}
| {
let mut buf: Vec<u8> = Vec::new();
let mut file = File::open(path)?;
file.read_to_end(&mut buf)?;
Ok(buf)
} |
time-table-admin.component.ts | import { Component, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { TimeTable } from 'app/student/timetable/models/TimeTable';
import { CourseService } from 'app/student/course/services/course.service';
import { AdminService } from '../services/admin-service.service';
import { CourseAdmin } from '../model/CourseAdmin';
@Component({
selector: 'app-time-table-admin',
templateUrl: './time-table-admin.component.html',
styleUrls: ['./time-table-admin.component.scss']
})
export class TimeTableAdminComponent implements OnInit {
private course: CourseAdmin = new CourseAdmin;
private timeTable:TimeTable[];
private flag: boolean = false;
private isFilled:boolean;
private alert:string;
constructor(
private route: ActivatedRoute,
private router: Router,
private _courseService: CourseService,
private _adminService:AdminService
) { }
ngOnInit() {
this._courseService.getAllCourse().subscribe((data) => {
this.route.params.subscribe((param) => { | this.course.courseId = Number(course['courseId']);
this.course.courseName = course['courseName'];
//console.log(this.course.courseName);
this.flag = true;
}
}
if (this.flag === false) {
this.router.navigate(['/','courses']);
}
this._adminService.getTimeTable(this.course.courseId).subscribe(
(data) => {
if (data === null) { //if feedback is not filled
this.isFilled = false;
}
else { // if feedback is already filled
this.timeTable = data;
this.isFilled = true;
}
});
})
});
}
public submitTimeTable(){
this.timeTable.forEach(data=>{
data.courseId=Number(this.course.courseId);
})
this._adminService.updateTimeTable(this.timeTable).subscribe(
(data) => {
this.router.navigate(['/admin/timetable']);
},
(err) => {
console.log(err);
}
);
}
} | for (const [key, course] of Object.entries(data)) {
//console.log(course['courseId']);
if (course['courseId'] == param['courseId']) { |
errors.pb.go | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.18.1
// source: google/ads/googleads/v8/errors/errors.proto
package errors
import (
proto "github.com/golang/protobuf/proto"
common "github.com/scotthenley/go-googleads/pb/v8/common"
enums "github.com/scotthenley/go-googleads/pb/v8/enums"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
durationpb "google.golang.org/protobuf/types/known/durationpb"
reflect "reflect"
sync "sync"
)
const (
	// Verify that this generated code is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
// (These checks fail the build, rather than misbehaving at runtime, if the
// protobuf runtime is older than this file was generated against.)
const _ = proto.ProtoPackageIsVersion4
// Enum of possible scopes that quota buckets belong to.
type QuotaErrorDetails_QuotaRateScope int32
const (
	// Unspecified enum
	QuotaErrorDetails_UNSPECIFIED QuotaErrorDetails_QuotaRateScope = 0
	// Used for return value only. Represents value unknown in this version.
	QuotaErrorDetails_UNKNOWN QuotaErrorDetails_QuotaRateScope = 1
	// Per customer account quota
	QuotaErrorDetails_ACCOUNT QuotaErrorDetails_QuotaRateScope = 2
	// Per project or DevToken quota
	QuotaErrorDetails_DEVELOPER QuotaErrorDetails_QuotaRateScope = 3
)
// Enum value maps for QuotaErrorDetails_QuotaRateScope.
var (
	QuotaErrorDetails_QuotaRateScope_name = map[int32]string{
		0: "UNSPECIFIED",
		1: "UNKNOWN",
		2: "ACCOUNT",
		3: "DEVELOPER",
	}
	QuotaErrorDetails_QuotaRateScope_value = map[string]int32{
		"UNSPECIFIED": 0,
		"UNKNOWN":     1,
		"ACCOUNT":     2,
		"DEVELOPER":   3,
	}
)
// Enum returns a pointer to x; useful when populating optional enum fields.
func (x QuotaErrorDetails_QuotaRateScope) Enum() *QuotaErrorDetails_QuotaRateScope {
	p := new(QuotaErrorDetails_QuotaRateScope)
	*p = x
	return p
}
func (x QuotaErrorDetails_QuotaRateScope) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (QuotaErrorDetails_QuotaRateScope) Descriptor() protoreflect.EnumDescriptor {
	return file_google_ads_googleads_v8_errors_errors_proto_enumTypes[0].Descriptor()
}
func (QuotaErrorDetails_QuotaRateScope) Type() protoreflect.EnumType {
	return &file_google_ads_googleads_v8_errors_errors_proto_enumTypes[0]
}
func (x QuotaErrorDetails_QuotaRateScope) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}
// Deprecated: Use QuotaErrorDetails_QuotaRateScope.Descriptor instead.
func (QuotaErrorDetails_QuotaRateScope) EnumDescriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{7, 0}
}
// Describes how a GoogleAds API call failed. It's returned inside
// google.rpc.Status.details when a call fails.
type GoogleAdsFailure struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
	// The list of errors that occurred.
	Errors []*GoogleAdsError `protobuf:"bytes,1,rep,name=errors,proto3" json:"errors,omitempty"`
	// The unique ID of the request that is used for debugging purposes.
	RequestId string `protobuf:"bytes,2,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"`
}
// Reset zeroes the message and, on the unsafe fast path, re-registers its
// type metadata with the protobuf runtime.
func (x *GoogleAdsFailure) Reset() {
	*x = GoogleAdsFailure{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}
func (x *GoogleAdsFailure) String() string {
	return protoimpl.X.MessageStringOf(x)
}
func (*GoogleAdsFailure) ProtoMessage() {}
// ProtoReflect returns a reflective view of the message, lazily caching the
// message info on first use.
func (x *GoogleAdsFailure) ProtoReflect() protoreflect.Message {
	mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}
// Deprecated: Use GoogleAdsFailure.ProtoReflect.Descriptor instead.
func (*GoogleAdsFailure) Descriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{0}
}
// Nil-safe accessors (generated): each returns the zero value on a nil receiver.
func (x *GoogleAdsFailure) GetErrors() []*GoogleAdsError {
	if x != nil {
		return x.Errors
	}
	return nil
}
func (x *GoogleAdsFailure) GetRequestId() string {
	if x != nil {
		return x.RequestId
	}
	return ""
}
// GoogleAds-specific error.
type GoogleAdsError struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
	// An enum value that indicates which error occurred.
	ErrorCode *ErrorCode `protobuf:"bytes,1,opt,name=error_code,json=errorCode,proto3" json:"error_code,omitempty"`
	// A human-readable description of the error.
	Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
	// The value that triggered the error.
	Trigger *common.Value `protobuf:"bytes,3,opt,name=trigger,proto3" json:"trigger,omitempty"`
	// Describes the part of the request proto that caused the error.
	Location *ErrorLocation `protobuf:"bytes,4,opt,name=location,proto3" json:"location,omitempty"`
	// Additional error details, which are returned by certain error codes. Most
	// error codes do not include details.
	Details *ErrorDetails `protobuf:"bytes,5,opt,name=details,proto3" json:"details,omitempty"`
}
// Reset zeroes the message and, on the unsafe fast path, re-registers its
// type metadata with the protobuf runtime.
func (x *GoogleAdsError) Reset() {
	*x = GoogleAdsError{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}
func (x *GoogleAdsError) String() string {
	return protoimpl.X.MessageStringOf(x)
}
func (*GoogleAdsError) ProtoMessage() {}
// ProtoReflect returns a reflective view of the message, lazily caching the
// message info on first use.
func (x *GoogleAdsError) ProtoReflect() protoreflect.Message {
	mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}
// Deprecated: Use GoogleAdsError.ProtoReflect.Descriptor instead.
func (*GoogleAdsError) Descriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{1}
}
// Nil-safe accessors (generated): each returns the zero value on a nil receiver.
func (x *GoogleAdsError) GetErrorCode() *ErrorCode {
	if x != nil {
		return x.ErrorCode
	}
	return nil
}
func (x *GoogleAdsError) GetMessage() string {
	if x != nil {
		return x.Message
	}
	return ""
}
func (x *GoogleAdsError) GetTrigger() *common.Value {
	if x != nil {
		return x.Trigger
	}
	return nil
}
func (x *GoogleAdsError) GetLocation() *ErrorLocation {
	if x != nil {
		return x.Location
	}
	return nil
}
func (x *GoogleAdsError) GetDetails() *ErrorDetails {
	if x != nil {
		return x.Details
	}
	return nil
}
// The error reason represented by type and enum.
type ErrorCode struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
	// The list of error enums
	//
	// Types that are assignable to ErrorCode: exactly one `ErrorCode_*`
	// wrapper struct per error category (request, bidding strategy, URL
	// field, query, mutate, quota, ad, campaign, criterion, feed,
	// conversion, keyword-plan, billing, ...). The full set is enumerated
	// by the typed Get* accessors that follow this message's methods.
	ErrorCode isErrorCode_ErrorCode `protobuf_oneof:"error_code"`
}
// Reset zeroes the message and, on the unsafe fast path, re-registers its
// type metadata with the protobuf runtime.
func (x *ErrorCode) Reset() {
	*x = ErrorCode{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}
func (x *ErrorCode) String() string {
	return protoimpl.X.MessageStringOf(x)
}
func (*ErrorCode) ProtoMessage() {}
// ProtoReflect returns a reflective view of the message, lazily caching the
// message info on first use.
func (x *ErrorCode) ProtoReflect() protoreflect.Message {
	mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}
// Deprecated: Use ErrorCode.ProtoReflect.Descriptor instead.
func (*ErrorCode) Descriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{2}
}
// GetErrorCode returns the raw oneof wrapper; nil-safe like the accessors.
func (m *ErrorCode) GetErrorCode() isErrorCode_ErrorCode {
	if m != nil {
		return m.ErrorCode
	}
	return nil
}
// Typed accessors for the error_code oneof. Each returns the wrapped enum
// value when that variant is set, and the category's UNSPECIFIED value
// otherwise (including on a nil receiver, since GetErrorCode is nil-safe).
func (x *ErrorCode) GetRequestError() RequestErrorEnum_RequestError {
	if x, ok := x.GetErrorCode().(*ErrorCode_RequestError); ok {
		return x.RequestError
	}
	return RequestErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetBiddingStrategyError() BiddingStrategyErrorEnum_BiddingStrategyError {
	if x, ok := x.GetErrorCode().(*ErrorCode_BiddingStrategyError); ok {
		return x.BiddingStrategyError
	}
	return BiddingStrategyErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetUrlFieldError() UrlFieldErrorEnum_UrlFieldError {
	if x, ok := x.GetErrorCode().(*ErrorCode_UrlFieldError); ok {
		return x.UrlFieldError
	}
	return UrlFieldErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetListOperationError() ListOperationErrorEnum_ListOperationError {
	if x, ok := x.GetErrorCode().(*ErrorCode_ListOperationError); ok {
		return x.ListOperationError
	}
	return ListOperationErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetQueryError() QueryErrorEnum_QueryError {
	if x, ok := x.GetErrorCode().(*ErrorCode_QueryError); ok {
		return x.QueryError
	}
	return QueryErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetMutateError() MutateErrorEnum_MutateError {
	if x, ok := x.GetErrorCode().(*ErrorCode_MutateError); ok {
		return x.MutateError
	}
	return MutateErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFieldMaskError() FieldMaskErrorEnum_FieldMaskError {
	if x, ok := x.GetErrorCode().(*ErrorCode_FieldMaskError); ok {
		return x.FieldMaskError
	}
	return FieldMaskErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAuthorizationError() AuthorizationErrorEnum_AuthorizationError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AuthorizationError); ok {
		return x.AuthorizationError
	}
	return AuthorizationErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetInternalError() InternalErrorEnum_InternalError {
	if x, ok := x.GetErrorCode().(*ErrorCode_InternalError); ok {
		return x.InternalError
	}
	return InternalErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetQuotaError() QuotaErrorEnum_QuotaError {
	if x, ok := x.GetErrorCode().(*ErrorCode_QuotaError); ok {
		return x.QuotaError
	}
	return QuotaErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdError() AdErrorEnum_AdError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AdError); ok {
		return x.AdError
	}
	return AdErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdGroupError() AdGroupErrorEnum_AdGroupError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AdGroupError); ok {
		return x.AdGroupError
	}
	return AdGroupErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCampaignBudgetError() CampaignBudgetErrorEnum_CampaignBudgetError {
	if x, ok := x.GetErrorCode().(*ErrorCode_CampaignBudgetError); ok {
		return x.CampaignBudgetError
	}
	return CampaignBudgetErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCampaignError() CampaignErrorEnum_CampaignError {
	if x, ok := x.GetErrorCode().(*ErrorCode_CampaignError); ok {
		return x.CampaignError
	}
	return CampaignErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAuthenticationError() AuthenticationErrorEnum_AuthenticationError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AuthenticationError); ok {
		return x.AuthenticationError
	}
	return AuthenticationErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdGroupCriterionError() AdGroupCriterionErrorEnum_AdGroupCriterionError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AdGroupCriterionError); ok {
		return x.AdGroupCriterionError
	}
	return AdGroupCriterionErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdCustomizerError() AdCustomizerErrorEnum_AdCustomizerError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AdCustomizerError); ok {
		return x.AdCustomizerError
	}
	return AdCustomizerErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdGroupAdError() AdGroupAdErrorEnum_AdGroupAdError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AdGroupAdError); ok {
		return x.AdGroupAdError
	}
	return AdGroupAdErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdSharingError() AdSharingErrorEnum_AdSharingError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AdSharingError); ok {
		return x.AdSharingError
	}
	return AdSharingErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdxError() AdxErrorEnum_AdxError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AdxError); ok {
		return x.AdxError
	}
	return AdxErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAssetError() AssetErrorEnum_AssetError {
	if x, ok := x.GetErrorCode().(*ErrorCode_AssetError); ok {
		return x.AssetError
	}
	return AssetErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetBiddingError() BiddingErrorEnum_BiddingError {
	if x, ok := x.GetErrorCode().(*ErrorCode_BiddingError); ok {
		return x.BiddingError
	}
	return BiddingErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCampaignCriterionError() CampaignCriterionErrorEnum_CampaignCriterionError {
	if x, ok := x.GetErrorCode().(*ErrorCode_CampaignCriterionError); ok {
		return x.CampaignCriterionError
	}
	return CampaignCriterionErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCollectionSizeError() CollectionSizeErrorEnum_CollectionSizeError {
	if x, ok := x.GetErrorCode().(*ErrorCode_CollectionSizeError); ok {
		return x.CollectionSizeError
	}
	return CollectionSizeErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCountryCodeError() CountryCodeErrorEnum_CountryCodeError {
	if x, ok := x.GetErrorCode().(*ErrorCode_CountryCodeError); ok {
		return x.CountryCodeError
	}
	return CountryCodeErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCriterionError() CriterionErrorEnum_CriterionError {
	if x, ok := x.GetErrorCode().(*ErrorCode_CriterionError); ok {
		return x.CriterionError
	}
	return CriterionErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCustomerError() CustomerErrorEnum_CustomerError {
	if x, ok := x.GetErrorCode().(*ErrorCode_CustomerError); ok {
		return x.CustomerError
	}
	return CustomerErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetDateError() DateErrorEnum_DateError {
	if x, ok := x.GetErrorCode().(*ErrorCode_DateError); ok {
		return x.DateError
	}
	return DateErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetDateRangeError() DateRangeErrorEnum_DateRangeError {
	if x, ok := x.GetErrorCode().(*ErrorCode_DateRangeError); ok {
		return x.DateRangeError
	}
	return DateRangeErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetDistinctError() DistinctErrorEnum_DistinctError {
	if x, ok := x.GetErrorCode().(*ErrorCode_DistinctError); ok {
		return x.DistinctError
	}
	return DistinctErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFeedAttributeReferenceError() FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError {
	if x, ok := x.GetErrorCode().(*ErrorCode_FeedAttributeReferenceError); ok {
		return x.FeedAttributeReferenceError
	}
	return FeedAttributeReferenceErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFunctionError() FunctionErrorEnum_FunctionError {
	if x, ok := x.GetErrorCode().(*ErrorCode_FunctionError); ok {
		return x.FunctionError
	}
	return FunctionErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFunctionParsingError() FunctionParsingErrorEnum_FunctionParsingError {
	if x, ok := x.GetErrorCode().(*ErrorCode_FunctionParsingError); ok {
		return x.FunctionParsingError
	}
	return FunctionParsingErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetIdError() IdErrorEnum_IdError {
	if x, ok := x.GetErrorCode().(*ErrorCode_IdError); ok {
		return x.IdError
	}
	return IdErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetImageError() ImageErrorEnum_ImageError {
	if x, ok := x.GetErrorCode().(*ErrorCode_ImageError); ok {
		return x.ImageError
	}
	return ImageErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetLanguageCodeError() LanguageCodeErrorEnum_LanguageCodeError {
	if x, ok := x.GetErrorCode().(*ErrorCode_LanguageCodeError); ok {
		return x.LanguageCodeError
	}
	return LanguageCodeErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetMediaBundleError() MediaBundleErrorEnum_MediaBundleError {
	if x, ok := x.GetErrorCode().(*ErrorCode_MediaBundleError); ok {
		return x.MediaBundleError
	}
	return MediaBundleErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetMediaUploadError() MediaUploadErrorEnum_MediaUploadError {
	if x, ok := x.GetErrorCode().(*ErrorCode_MediaUploadError); ok {
		return x.MediaUploadError
	}
	return MediaUploadErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetMediaFileError() MediaFileErrorEnum_MediaFileError {
	if x, ok := x.GetErrorCode().(*ErrorCode_MediaFileError); ok {
		return x.MediaFileError
	}
	return MediaFileErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetMultiplierError() MultiplierErrorEnum_MultiplierError {
	if x, ok := x.GetErrorCode().(*ErrorCode_MultiplierError); ok {
		return x.MultiplierError
	}
	return MultiplierErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetNewResourceCreationError() NewResourceCreationErrorEnum_NewResourceCreationError {
	if x, ok := x.GetErrorCode().(*ErrorCode_NewResourceCreationError); ok {
		return x.NewResourceCreationError
	}
	return NewResourceCreationErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetNotEmptyError() NotEmptyErrorEnum_NotEmptyError {
	if x, ok := x.GetErrorCode().(*ErrorCode_NotEmptyError); ok {
		return x.NotEmptyError
	}
	return NotEmptyErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetNullError() NullErrorEnum_NullError {
	if x, ok := x.GetErrorCode().(*ErrorCode_NullError); ok {
		return x.NullError
	}
	return NullErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetOperatorError() OperatorErrorEnum_OperatorError {
	if x, ok := x.GetErrorCode().(*ErrorCode_OperatorError); ok {
		return x.OperatorError
	}
	return OperatorErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetRangeError() RangeErrorEnum_RangeError {
	if x, ok := x.GetErrorCode().(*ErrorCode_RangeError); ok {
		return x.RangeError
	}
	return RangeErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetRecommendationError() RecommendationErrorEnum_RecommendationError {
	if x, ok := x.GetErrorCode().(*ErrorCode_RecommendationError); ok {
		return x.RecommendationError
	}
	return RecommendationErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetRegionCodeError() RegionCodeErrorEnum_RegionCodeError {
	if x, ok := x.GetErrorCode().(*ErrorCode_RegionCodeError); ok {
		return x.RegionCodeError
	}
	return RegionCodeErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetSettingError() SettingErrorEnum_SettingError {
	if x, ok := x.GetErrorCode().(*ErrorCode_SettingError); ok {
		return x.SettingError
	}
	return SettingErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetStringFormatError() StringFormatErrorEnum_StringFormatError {
	if x, ok := x.GetErrorCode().(*ErrorCode_StringFormatError); ok {
		return x.StringFormatError
	}
	return StringFormatErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetStringLengthError() StringLengthErrorEnum_StringLengthError {
	if x, ok := x.GetErrorCode().(*ErrorCode_StringLengthError); ok {
		return x.StringLengthError
	}
	return StringLengthErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetOperationAccessDeniedError() OperationAccessDeniedErrorEnum_OperationAccessDeniedError {
	if x, ok := x.GetErrorCode().(*ErrorCode_OperationAccessDeniedError); ok {
		return x.OperationAccessDeniedError
	}
	return OperationAccessDeniedErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetResourceAccessDeniedError() ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError {
	if x, ok := x.GetErrorCode().(*ErrorCode_ResourceAccessDeniedError); ok {
		return x.ResourceAccessDeniedError
	}
	return ResourceAccessDeniedErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetResourceCountLimitExceededError() ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError {
	if x, ok := x.GetErrorCode().(*ErrorCode_ResourceCountLimitExceededError); ok {
		return x.ResourceCountLimitExceededError
	}
	return ResourceCountLimitExceededErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetYoutubeVideoRegistrationError() YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError {
	if x, ok := x.GetErrorCode().(*ErrorCode_YoutubeVideoRegistrationError); ok {
		return x.YoutubeVideoRegistrationError
	}
	return YoutubeVideoRegistrationErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdGroupBidModifierError() AdGroupBidModifierErrorEnum_AdGroupBidModifierError {
if x, ok := x.GetErrorCode().(*ErrorCode_AdGroupBidModifierError); ok {
return x.AdGroupBidModifierError
}
return AdGroupBidModifierErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetContextError() ContextErrorEnum_ContextError {
if x, ok := x.GetErrorCode().(*ErrorCode_ContextError); ok {
return x.ContextError
}
return ContextErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFieldError() FieldErrorEnum_FieldError {
if x, ok := x.GetErrorCode().(*ErrorCode_FieldError); ok {
return x.FieldError
}
return FieldErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetSharedSetError() SharedSetErrorEnum_SharedSetError {
if x, ok := x.GetErrorCode().(*ErrorCode_SharedSetError); ok {
return x.SharedSetError
}
return SharedSetErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetSharedCriterionError() SharedCriterionErrorEnum_SharedCriterionError {
if x, ok := x.GetErrorCode().(*ErrorCode_SharedCriterionError); ok {
return x.SharedCriterionError
}
return SharedCriterionErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCampaignSharedSetError() CampaignSharedSetErrorEnum_CampaignSharedSetError {
if x, ok := x.GetErrorCode().(*ErrorCode_CampaignSharedSetError); ok {
return x.CampaignSharedSetError
}
return CampaignSharedSetErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetConversionActionError() ConversionActionErrorEnum_ConversionActionError {
if x, ok := x.GetErrorCode().(*ErrorCode_ConversionActionError); ok {
return x.ConversionActionError
}
return ConversionActionErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetConversionAdjustmentUploadError() ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError {
if x, ok := x.GetErrorCode().(*ErrorCode_ConversionAdjustmentUploadError); ok {
return x.ConversionAdjustmentUploadError
}
return ConversionAdjustmentUploadErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetConversionCustomVariableError() ConversionCustomVariableErrorEnum_ConversionCustomVariableError {
if x, ok := x.GetErrorCode().(*ErrorCode_ConversionCustomVariableError); ok {
return x.ConversionCustomVariableError
}
return ConversionCustomVariableErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetConversionUploadError() ConversionUploadErrorEnum_ConversionUploadError {
if x, ok := x.GetErrorCode().(*ErrorCode_ConversionUploadError); ok {
return x.ConversionUploadError
}
return ConversionUploadErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetConversionValueRuleError() ConversionValueRuleErrorEnum_ConversionValueRuleError {
if x, ok := x.GetErrorCode().(*ErrorCode_ConversionValueRuleError); ok {
return x.ConversionValueRuleError
}
return ConversionValueRuleErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetConversionValueRuleSetError() ConversionValueRuleSetErrorEnum_ConversionValueRuleSetError {
if x, ok := x.GetErrorCode().(*ErrorCode_ConversionValueRuleSetError); ok {
return x.ConversionValueRuleSetError
}
return ConversionValueRuleSetErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetHeaderError() HeaderErrorEnum_HeaderError {
if x, ok := x.GetErrorCode().(*ErrorCode_HeaderError); ok {
return x.HeaderError
}
return HeaderErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetDatabaseError() DatabaseErrorEnum_DatabaseError {
if x, ok := x.GetErrorCode().(*ErrorCode_DatabaseError); ok {
return x.DatabaseError
}
return DatabaseErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetPolicyFindingError() PolicyFindingErrorEnum_PolicyFindingError {
if x, ok := x.GetErrorCode().(*ErrorCode_PolicyFindingError); ok {
return x.PolicyFindingError
}
return PolicyFindingErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetEnumError() EnumErrorEnum_EnumError {
if x, ok := x.GetErrorCode().(*ErrorCode_EnumError); ok {
return x.EnumError
}
return EnumErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetKeywordPlanError() KeywordPlanErrorEnum_KeywordPlanError {
if x, ok := x.GetErrorCode().(*ErrorCode_KeywordPlanError); ok {
return x.KeywordPlanError
}
return KeywordPlanErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetKeywordPlanCampaignError() KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError {
if x, ok := x.GetErrorCode().(*ErrorCode_KeywordPlanCampaignError); ok {
return x.KeywordPlanCampaignError
}
return KeywordPlanCampaignErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetKeywordPlanCampaignKeywordError() KeywordPlanCampaignKeywordErrorEnum_KeywordPlanCampaignKeywordError {
if x, ok := x.GetErrorCode().(*ErrorCode_KeywordPlanCampaignKeywordError); ok {
return x.KeywordPlanCampaignKeywordError
}
return KeywordPlanCampaignKeywordErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetKeywordPlanAdGroupError() KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError {
if x, ok := x.GetErrorCode().(*ErrorCode_KeywordPlanAdGroupError); ok {
return x.KeywordPlanAdGroupError
}
return KeywordPlanAdGroupErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetKeywordPlanAdGroupKeywordError() KeywordPlanAdGroupKeywordErrorEnum_KeywordPlanAdGroupKeywordError {
if x, ok := x.GetErrorCode().(*ErrorCode_KeywordPlanAdGroupKeywordError); ok {
return x.KeywordPlanAdGroupKeywordError
}
return KeywordPlanAdGroupKeywordErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetKeywordPlanIdeaError() KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError {
if x, ok := x.GetErrorCode().(*ErrorCode_KeywordPlanIdeaError); ok {
return x.KeywordPlanIdeaError
}
return KeywordPlanIdeaErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAccountBudgetProposalError() AccountBudgetProposalErrorEnum_AccountBudgetProposalError {
if x, ok := x.GetErrorCode().(*ErrorCode_AccountBudgetProposalError); ok {
return x.AccountBudgetProposalError
}
return AccountBudgetProposalErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetUserListError() UserListErrorEnum_UserListError {
if x, ok := x.GetErrorCode().(*ErrorCode_UserListError); ok {
return x.UserListError
}
return UserListErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetChangeEventError() ChangeEventErrorEnum_ChangeEventError {
if x, ok := x.GetErrorCode().(*ErrorCode_ChangeEventError); ok {
return x.ChangeEventError
}
return ChangeEventErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetChangeStatusError() ChangeStatusErrorEnum_ChangeStatusError {
if x, ok := x.GetErrorCode().(*ErrorCode_ChangeStatusError); ok {
return x.ChangeStatusError
}
return ChangeStatusErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFeedError() FeedErrorEnum_FeedError {
if x, ok := x.GetErrorCode().(*ErrorCode_FeedError); ok {
return x.FeedError
}
return FeedErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetGeoTargetConstantSuggestionError() GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError {
if x, ok := x.GetErrorCode().(*ErrorCode_GeoTargetConstantSuggestionError); ok {
return x.GeoTargetConstantSuggestionError
}
return GeoTargetConstantSuggestionErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCampaignDraftError() CampaignDraftErrorEnum_CampaignDraftError {
if x, ok := x.GetErrorCode().(*ErrorCode_CampaignDraftError); ok {
return x.CampaignDraftError
}
return CampaignDraftErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFeedItemError() FeedItemErrorEnum_FeedItemError {
if x, ok := x.GetErrorCode().(*ErrorCode_FeedItemError); ok {
return x.FeedItemError
}
return FeedItemErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetLabelError() LabelErrorEnum_LabelError {
if x, ok := x.GetErrorCode().(*ErrorCode_LabelError); ok {
return x.LabelError
}
return LabelErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetBillingSetupError() BillingSetupErrorEnum_BillingSetupError {
if x, ok := x.GetErrorCode().(*ErrorCode_BillingSetupError); ok {
return x.BillingSetupError
}
return BillingSetupErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCustomerClientLinkError() CustomerClientLinkErrorEnum_CustomerClientLinkError {
if x, ok := x.GetErrorCode().(*ErrorCode_CustomerClientLinkError); ok {
return x.CustomerClientLinkError
}
return CustomerClientLinkErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCustomerManagerLinkError() CustomerManagerLinkErrorEnum_CustomerManagerLinkError {
if x, ok := x.GetErrorCode().(*ErrorCode_CustomerManagerLinkError); ok {
return x.CustomerManagerLinkError
}
return CustomerManagerLinkErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFeedMappingError() FeedMappingErrorEnum_FeedMappingError {
if x, ok := x.GetErrorCode().(*ErrorCode_FeedMappingError); ok {
return x.FeedMappingError
}
return FeedMappingErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCustomerFeedError() CustomerFeedErrorEnum_CustomerFeedError {
if x, ok := x.GetErrorCode().(*ErrorCode_CustomerFeedError); ok {
return x.CustomerFeedError
}
return CustomerFeedErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdGroupFeedError() AdGroupFeedErrorEnum_AdGroupFeedError {
if x, ok := x.GetErrorCode().(*ErrorCode_AdGroupFeedError); ok {
return x.AdGroupFeedError
}
return AdGroupFeedErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCampaignFeedError() CampaignFeedErrorEnum_CampaignFeedError {
if x, ok := x.GetErrorCode().(*ErrorCode_CampaignFeedError); ok {
return x.CampaignFeedError
}
return CampaignFeedErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCustomInterestError() CustomInterestErrorEnum_CustomInterestError {
if x, ok := x.GetErrorCode().(*ErrorCode_CustomInterestError); ok {
return x.CustomInterestError
}
return CustomInterestErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCampaignExperimentError() CampaignExperimentErrorEnum_CampaignExperimentError {
if x, ok := x.GetErrorCode().(*ErrorCode_CampaignExperimentError); ok {
return x.CampaignExperimentError
}
return CampaignExperimentErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetExtensionFeedItemError() ExtensionFeedItemErrorEnum_ExtensionFeedItemError {
if x, ok := x.GetErrorCode().(*ErrorCode_ExtensionFeedItemError); ok {
return x.ExtensionFeedItemError
}
return ExtensionFeedItemErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAdParameterError() AdParameterErrorEnum_AdParameterError {
if x, ok := x.GetErrorCode().(*ErrorCode_AdParameterError); ok {
return x.AdParameterError
}
return AdParameterErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFeedItemValidationError() FeedItemValidationErrorEnum_FeedItemValidationError {
if x, ok := x.GetErrorCode().(*ErrorCode_FeedItemValidationError); ok {
return x.FeedItemValidationError
}
return FeedItemValidationErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetExtensionSettingError() ExtensionSettingErrorEnum_ExtensionSettingError {
if x, ok := x.GetErrorCode().(*ErrorCode_ExtensionSettingError); ok {
return x.ExtensionSettingError
}
return ExtensionSettingErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFeedItemSetError() FeedItemSetErrorEnum_FeedItemSetError {
if x, ok := x.GetErrorCode().(*ErrorCode_FeedItemSetError); ok {
return x.FeedItemSetError
}
return FeedItemSetErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFeedItemSetLinkError() FeedItemSetLinkErrorEnum_FeedItemSetLinkError {
if x, ok := x.GetErrorCode().(*ErrorCode_FeedItemSetLinkError); ok {
return x.FeedItemSetLinkError
}
return FeedItemSetLinkErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetFeedItemTargetError() FeedItemTargetErrorEnum_FeedItemTargetError {
if x, ok := x.GetErrorCode().(*ErrorCode_FeedItemTargetError); ok {
return x.FeedItemTargetError
}
return FeedItemTargetErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetPolicyViolationError() PolicyViolationErrorEnum_PolicyViolationError {
if x, ok := x.GetErrorCode().(*ErrorCode_PolicyViolationError); ok {
return x.PolicyViolationError
}
return PolicyViolationErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetPartialFailureError() PartialFailureErrorEnum_PartialFailureError {
if x, ok := x.GetErrorCode().(*ErrorCode_PartialFailureError); ok {
return x.PartialFailureError
}
return PartialFailureErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetPolicyValidationParameterError() PolicyValidationParameterErrorEnum_PolicyValidationParameterError {
if x, ok := x.GetErrorCode().(*ErrorCode_PolicyValidationParameterError); ok {
return x.PolicyValidationParameterError
}
return PolicyValidationParameterErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetSizeLimitError() SizeLimitErrorEnum_SizeLimitError {
if x, ok := x.GetErrorCode().(*ErrorCode_SizeLimitError); ok {
return x.SizeLimitError
}
return SizeLimitErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetOfflineUserDataJobError() OfflineUserDataJobErrorEnum_OfflineUserDataJobError {
if x, ok := x.GetErrorCode().(*ErrorCode_OfflineUserDataJobError); ok {
return x.OfflineUserDataJobError
}
return OfflineUserDataJobErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetNotAllowlistedError() NotAllowlistedErrorEnum_NotAllowlistedError {
if x, ok := x.GetErrorCode().(*ErrorCode_NotAllowlistedError); ok {
return x.NotAllowlistedError
}
return NotAllowlistedErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetManagerLinkError() ManagerLinkErrorEnum_ManagerLinkError {
if x, ok := x.GetErrorCode().(*ErrorCode_ManagerLinkError); ok {
return x.ManagerLinkError
}
return ManagerLinkErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCurrencyCodeError() CurrencyCodeErrorEnum_CurrencyCodeError {
if x, ok := x.GetErrorCode().(*ErrorCode_CurrencyCodeError); ok {
return x.CurrencyCodeError
}
return CurrencyCodeErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAccessInvitationError() AccessInvitationErrorEnum_AccessInvitationError {
if x, ok := x.GetErrorCode().(*ErrorCode_AccessInvitationError); ok {
return x.AccessInvitationError
}
return AccessInvitationErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetReachPlanError() ReachPlanErrorEnum_ReachPlanError {
if x, ok := x.GetErrorCode().(*ErrorCode_ReachPlanError); ok {
return x.ReachPlanError
}
return ReachPlanErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetInvoiceError() InvoiceErrorEnum_InvoiceError {
if x, ok := x.GetErrorCode().(*ErrorCode_InvoiceError); ok {
return x.InvoiceError
}
return InvoiceErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetPaymentsAccountError() PaymentsAccountErrorEnum_PaymentsAccountError {
if x, ok := x.GetErrorCode().(*ErrorCode_PaymentsAccountError); ok {
return x.PaymentsAccountError
}
return PaymentsAccountErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetTimeZoneError() TimeZoneErrorEnum_TimeZoneError {
if x, ok := x.GetErrorCode().(*ErrorCode_TimeZoneError); ok {
return x.TimeZoneError
}
return TimeZoneErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAssetLinkError() AssetLinkErrorEnum_AssetLinkError {
if x, ok := x.GetErrorCode().(*ErrorCode_AssetLinkError); ok {
return x.AssetLinkError
}
return AssetLinkErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetUserDataError() UserDataErrorEnum_UserDataError {
if x, ok := x.GetErrorCode().(*ErrorCode_UserDataError); ok {
return x.UserDataError
}
return UserDataErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetBatchJobError() BatchJobErrorEnum_BatchJobError {
if x, ok := x.GetErrorCode().(*ErrorCode_BatchJobError); ok {
return x.BatchJobError
}
return BatchJobErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetAccountLinkError() AccountLinkErrorEnum_AccountLinkError {
if x, ok := x.GetErrorCode().(*ErrorCode_AccountLinkError); ok {
return x.AccountLinkError
}
return AccountLinkErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetThirdPartyAppAnalyticsLinkError() ThirdPartyAppAnalyticsLinkErrorEnum_ThirdPartyAppAnalyticsLinkError {
if x, ok := x.GetErrorCode().(*ErrorCode_ThirdPartyAppAnalyticsLinkError); ok {
return x.ThirdPartyAppAnalyticsLinkError
}
return ThirdPartyAppAnalyticsLinkErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCustomerUserAccessError() CustomerUserAccessErrorEnum_CustomerUserAccessError {
if x, ok := x.GetErrorCode().(*ErrorCode_CustomerUserAccessError); ok {
return x.CustomerUserAccessError
}
return CustomerUserAccessErrorEnum_UNSPECIFIED
}
func (x *ErrorCode) GetCustomAudienceError() CustomAudienceErrorEnum_CustomAudienceError {
if x, ok := x.GetErrorCode().(*ErrorCode_CustomAudienceError); ok {
return x.CustomAudienceError
}
return CustomAudienceErrorEnum_UNSPECIFIED
}
// isErrorCode_ErrorCode is the marker interface satisfied by every oneof
// wrapper type (ErrorCode_Xxx below) that can populate ErrorCode's error_code
// field; the unexported method restricts implementations to this package.
type isErrorCode_ErrorCode interface {
	isErrorCode_ErrorCode()
}
// Oneof wrapper types for ErrorCode.error_code: each struct holds exactly one
// enum field and carries the protobuf tag (wire type, field number, JSON name)
// for its variant. Field comments come from the source .proto file.
type ErrorCode_RequestError struct {
	// An error caused by the request
	RequestError RequestErrorEnum_RequestError `protobuf:"varint,1,opt,name=request_error,json=requestError,proto3,enum=google.ads.googleads.v8.errors.RequestErrorEnum_RequestError,oneof"`
}
type ErrorCode_BiddingStrategyError struct {
	// An error with a Bidding Strategy mutate.
	BiddingStrategyError BiddingStrategyErrorEnum_BiddingStrategyError `protobuf:"varint,2,opt,name=bidding_strategy_error,json=biddingStrategyError,proto3,enum=google.ads.googleads.v8.errors.BiddingStrategyErrorEnum_BiddingStrategyError,oneof"`
}
type ErrorCode_UrlFieldError struct {
	// An error with a URL field mutate.
	UrlFieldError UrlFieldErrorEnum_UrlFieldError `protobuf:"varint,3,opt,name=url_field_error,json=urlFieldError,proto3,enum=google.ads.googleads.v8.errors.UrlFieldErrorEnum_UrlFieldError,oneof"`
}
type ErrorCode_ListOperationError struct {
	// An error with a list operation.
	ListOperationError ListOperationErrorEnum_ListOperationError `protobuf:"varint,4,opt,name=list_operation_error,json=listOperationError,proto3,enum=google.ads.googleads.v8.errors.ListOperationErrorEnum_ListOperationError,oneof"`
}
type ErrorCode_QueryError struct {
	// An error with an AWQL query
	QueryError QueryErrorEnum_QueryError `protobuf:"varint,5,opt,name=query_error,json=queryError,proto3,enum=google.ads.googleads.v8.errors.QueryErrorEnum_QueryError,oneof"`
}
type ErrorCode_MutateError struct {
	// An error with a mutate
	MutateError MutateErrorEnum_MutateError `protobuf:"varint,7,opt,name=mutate_error,json=mutateError,proto3,enum=google.ads.googleads.v8.errors.MutateErrorEnum_MutateError,oneof"`
}
type ErrorCode_FieldMaskError struct {
	// An error with a field mask
	FieldMaskError FieldMaskErrorEnum_FieldMaskError `protobuf:"varint,8,opt,name=field_mask_error,json=fieldMaskError,proto3,enum=google.ads.googleads.v8.errors.FieldMaskErrorEnum_FieldMaskError,oneof"`
}
type ErrorCode_AuthorizationError struct {
	// An error encountered when trying to authorize a user.
	AuthorizationError AuthorizationErrorEnum_AuthorizationError `protobuf:"varint,9,opt,name=authorization_error,json=authorizationError,proto3,enum=google.ads.googleads.v8.errors.AuthorizationErrorEnum_AuthorizationError,oneof"`
}
type ErrorCode_InternalError struct {
	// An unexpected server-side error.
	InternalError InternalErrorEnum_InternalError `protobuf:"varint,10,opt,name=internal_error,json=internalError,proto3,enum=google.ads.googleads.v8.errors.InternalErrorEnum_InternalError,oneof"`
}
type ErrorCode_QuotaError struct {
	// An error with the amount of quota remaining.
	QuotaError QuotaErrorEnum_QuotaError `protobuf:"varint,11,opt,name=quota_error,json=quotaError,proto3,enum=google.ads.googleads.v8.errors.QuotaErrorEnum_QuotaError,oneof"`
}
type ErrorCode_AdError struct {
	// An error with an Ad Group Ad mutate.
	AdError AdErrorEnum_AdError `protobuf:"varint,12,opt,name=ad_error,json=adError,proto3,enum=google.ads.googleads.v8.errors.AdErrorEnum_AdError,oneof"`
}
type ErrorCode_AdGroupError struct {
	// An error with an Ad Group mutate.
	AdGroupError AdGroupErrorEnum_AdGroupError `protobuf:"varint,13,opt,name=ad_group_error,json=adGroupError,proto3,enum=google.ads.googleads.v8.errors.AdGroupErrorEnum_AdGroupError,oneof"`
}
type ErrorCode_CampaignBudgetError struct {
	// An error with a Campaign Budget mutate.
	CampaignBudgetError CampaignBudgetErrorEnum_CampaignBudgetError `protobuf:"varint,14,opt,name=campaign_budget_error,json=campaignBudgetError,proto3,enum=google.ads.googleads.v8.errors.CampaignBudgetErrorEnum_CampaignBudgetError,oneof"`
}
type ErrorCode_CampaignError struct {
	// An error with a Campaign mutate.
	CampaignError CampaignErrorEnum_CampaignError `protobuf:"varint,15,opt,name=campaign_error,json=campaignError,proto3,enum=google.ads.googleads.v8.errors.CampaignErrorEnum_CampaignError,oneof"`
}
type ErrorCode_AuthenticationError struct {
	// Indicates failure to properly authenticate user.
	AuthenticationError AuthenticationErrorEnum_AuthenticationError `protobuf:"varint,17,opt,name=authentication_error,json=authenticationError,proto3,enum=google.ads.googleads.v8.errors.AuthenticationErrorEnum_AuthenticationError,oneof"`
}
type ErrorCode_AdGroupCriterionError struct {
	// An error with an ad group criterion mutate.
	// NOTE(review): the upstream .proto carries a copy-pasted comment from
	// AuthenticationError here; corrected to describe this field.
	AdGroupCriterionError AdGroupCriterionErrorEnum_AdGroupCriterionError `protobuf:"varint,18,opt,name=ad_group_criterion_error,json=adGroupCriterionError,proto3,enum=google.ads.googleads.v8.errors.AdGroupCriterionErrorEnum_AdGroupCriterionError,oneof"`
}
type ErrorCode_AdCustomizerError struct {
	// The reasons for the ad customizer error
	AdCustomizerError AdCustomizerErrorEnum_AdCustomizerError `protobuf:"varint,19,opt,name=ad_customizer_error,json=adCustomizerError,proto3,enum=google.ads.googleads.v8.errors.AdCustomizerErrorEnum_AdCustomizerError,oneof"`
}
type ErrorCode_AdGroupAdError struct {
	// The reasons for the ad group ad error
	AdGroupAdError AdGroupAdErrorEnum_AdGroupAdError `protobuf:"varint,21,opt,name=ad_group_ad_error,json=adGroupAdError,proto3,enum=google.ads.googleads.v8.errors.AdGroupAdErrorEnum_AdGroupAdError,oneof"`
}
type ErrorCode_AdSharingError struct {
	// The reasons for the ad sharing error
	AdSharingError AdSharingErrorEnum_AdSharingError `protobuf:"varint,24,opt,name=ad_sharing_error,json=adSharingError,proto3,enum=google.ads.googleads.v8.errors.AdSharingErrorEnum_AdSharingError,oneof"`
}
type ErrorCode_AdxError struct {
	// The reasons for the adx error
	AdxError AdxErrorEnum_AdxError `protobuf:"varint,25,opt,name=adx_error,json=adxError,proto3,enum=google.ads.googleads.v8.errors.AdxErrorEnum_AdxError,oneof"`
}
type ErrorCode_AssetError struct {
	// The reasons for the asset error
	AssetError AssetErrorEnum_AssetError `protobuf:"varint,107,opt,name=asset_error,json=assetError,proto3,enum=google.ads.googleads.v8.errors.AssetErrorEnum_AssetError,oneof"`
}
type ErrorCode_BiddingError struct {
	// The reasons for the bidding errors
	BiddingError BiddingErrorEnum_BiddingError `protobuf:"varint,26,opt,name=bidding_error,json=biddingError,proto3,enum=google.ads.googleads.v8.errors.BiddingErrorEnum_BiddingError,oneof"`
}
type ErrorCode_CampaignCriterionError struct {
	// The reasons for the campaign criterion error
	CampaignCriterionError CampaignCriterionErrorEnum_CampaignCriterionError `protobuf:"varint,29,opt,name=campaign_criterion_error,json=campaignCriterionError,proto3,enum=google.ads.googleads.v8.errors.CampaignCriterionErrorEnum_CampaignCriterionError,oneof"`
}
type ErrorCode_CollectionSizeError struct {
	// The reasons for the collection size error
	CollectionSizeError CollectionSizeErrorEnum_CollectionSizeError `protobuf:"varint,31,opt,name=collection_size_error,json=collectionSizeError,proto3,enum=google.ads.googleads.v8.errors.CollectionSizeErrorEnum_CollectionSizeError,oneof"`
}
type ErrorCode_CountryCodeError struct {
	// The reasons for the country code error
	CountryCodeError CountryCodeErrorEnum_CountryCodeError `protobuf:"varint,109,opt,name=country_code_error,json=countryCodeError,proto3,enum=google.ads.googleads.v8.errors.CountryCodeErrorEnum_CountryCodeError,oneof"`
}
type ErrorCode_CriterionError struct {
	// The reasons for the criterion error
	CriterionError CriterionErrorEnum_CriterionError `protobuf:"varint,32,opt,name=criterion_error,json=criterionError,proto3,enum=google.ads.googleads.v8.errors.CriterionErrorEnum_CriterionError,oneof"`
}
type ErrorCode_CustomerError struct {
	// The reasons for the customer error
	CustomerError CustomerErrorEnum_CustomerError `protobuf:"varint,90,opt,name=customer_error,json=customerError,proto3,enum=google.ads.googleads.v8.errors.CustomerErrorEnum_CustomerError,oneof"`
}
type ErrorCode_DateError struct {
	// The reasons for the date error
	DateError DateErrorEnum_DateError `protobuf:"varint,33,opt,name=date_error,json=dateError,proto3,enum=google.ads.googleads.v8.errors.DateErrorEnum_DateError,oneof"`
}
type ErrorCode_DateRangeError struct {
	// The reasons for the date range error
	DateRangeError DateRangeErrorEnum_DateRangeError `protobuf:"varint,34,opt,name=date_range_error,json=dateRangeError,proto3,enum=google.ads.googleads.v8.errors.DateRangeErrorEnum_DateRangeError,oneof"`
}
type ErrorCode_DistinctError struct {
	// The reasons for the distinct error
	DistinctError DistinctErrorEnum_DistinctError `protobuf:"varint,35,opt,name=distinct_error,json=distinctError,proto3,enum=google.ads.googleads.v8.errors.DistinctErrorEnum_DistinctError,oneof"`
}
type ErrorCode_FeedAttributeReferenceError struct {
	// The reasons for the feed attribute reference error
	FeedAttributeReferenceError FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError `protobuf:"varint,36,opt,name=feed_attribute_reference_error,json=feedAttributeReferenceError,proto3,enum=google.ads.googleads.v8.errors.FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError,oneof"`
}
type ErrorCode_FunctionError struct {
	// The reasons for the function error
	FunctionError FunctionErrorEnum_FunctionError `protobuf:"varint,37,opt,name=function_error,json=functionError,proto3,enum=google.ads.googleads.v8.errors.FunctionErrorEnum_FunctionError,oneof"`
}
type ErrorCode_FunctionParsingError struct {
	// The reasons for the function parsing error
	FunctionParsingError FunctionParsingErrorEnum_FunctionParsingError `protobuf:"varint,38,opt,name=function_parsing_error,json=functionParsingError,proto3,enum=google.ads.googleads.v8.errors.FunctionParsingErrorEnum_FunctionParsingError,oneof"`
}
type ErrorCode_IdError struct {
	// The reasons for the id error
	IdError IdErrorEnum_IdError `protobuf:"varint,39,opt,name=id_error,json=idError,proto3,enum=google.ads.googleads.v8.errors.IdErrorEnum_IdError,oneof"`
}
type ErrorCode_ImageError struct {
	// The reasons for the image error
	ImageError ImageErrorEnum_ImageError `protobuf:"varint,40,opt,name=image_error,json=imageError,proto3,enum=google.ads.googleads.v8.errors.ImageErrorEnum_ImageError,oneof"`
}
type ErrorCode_LanguageCodeError struct {
	// The reasons for the language code error
	LanguageCodeError LanguageCodeErrorEnum_LanguageCodeError `protobuf:"varint,110,opt,name=language_code_error,json=languageCodeError,proto3,enum=google.ads.googleads.v8.errors.LanguageCodeErrorEnum_LanguageCodeError,oneof"`
}
type ErrorCode_MediaBundleError struct {
	// The reasons for the media bundle error
	MediaBundleError MediaBundleErrorEnum_MediaBundleError `protobuf:"varint,42,opt,name=media_bundle_error,json=mediaBundleError,proto3,enum=google.ads.googleads.v8.errors.MediaBundleErrorEnum_MediaBundleError,oneof"`
}
type ErrorCode_MediaUploadError struct {
	// The reasons for media uploading errors.
	MediaUploadError MediaUploadErrorEnum_MediaUploadError `protobuf:"varint,116,opt,name=media_upload_error,json=mediaUploadError,proto3,enum=google.ads.googleads.v8.errors.MediaUploadErrorEnum_MediaUploadError,oneof"`
}
type ErrorCode_MediaFileError struct {
	// The reasons for the media file error
	MediaFileError MediaFileErrorEnum_MediaFileError `protobuf:"varint,86,opt,name=media_file_error,json=mediaFileError,proto3,enum=google.ads.googleads.v8.errors.MediaFileErrorEnum_MediaFileError,oneof"`
}
type ErrorCode_MultiplierError struct {
	// The reasons for the multiplier error
	MultiplierError MultiplierErrorEnum_MultiplierError `protobuf:"varint,44,opt,name=multiplier_error,json=multiplierError,proto3,enum=google.ads.googleads.v8.errors.MultiplierErrorEnum_MultiplierError,oneof"`
}
type ErrorCode_NewResourceCreationError struct {
	// The reasons for the new resource creation error
	NewResourceCreationError NewResourceCreationErrorEnum_NewResourceCreationError `protobuf:"varint,45,opt,name=new_resource_creation_error,json=newResourceCreationError,proto3,enum=google.ads.googleads.v8.errors.NewResourceCreationErrorEnum_NewResourceCreationError,oneof"`
}
type ErrorCode_NotEmptyError struct {
	// The reasons for the not empty error
	NotEmptyError NotEmptyErrorEnum_NotEmptyError `protobuf:"varint,46,opt,name=not_empty_error,json=notEmptyError,proto3,enum=google.ads.googleads.v8.errors.NotEmptyErrorEnum_NotEmptyError,oneof"`
}
type ErrorCode_NullError struct {
	// The reasons for the null error
	NullError NullErrorEnum_NullError `protobuf:"varint,47,opt,name=null_error,json=nullError,proto3,enum=google.ads.googleads.v8.errors.NullErrorEnum_NullError,oneof"`
}
type ErrorCode_OperatorError struct {
	// The reasons for the operator error
	OperatorError OperatorErrorEnum_OperatorError `protobuf:"varint,48,opt,name=operator_error,json=operatorError,proto3,enum=google.ads.googleads.v8.errors.OperatorErrorEnum_OperatorError,oneof"`
}
type ErrorCode_RangeError struct {
	// The reasons for the range error
	RangeError RangeErrorEnum_RangeError `protobuf:"varint,49,opt,name=range_error,json=rangeError,proto3,enum=google.ads.googleads.v8.errors.RangeErrorEnum_RangeError,oneof"`
}
type ErrorCode_RecommendationError struct {
	// The reasons for error in applying a recommendation
	RecommendationError RecommendationErrorEnum_RecommendationError `protobuf:"varint,58,opt,name=recommendation_error,json=recommendationError,proto3,enum=google.ads.googleads.v8.errors.RecommendationErrorEnum_RecommendationError,oneof"`
}
type ErrorCode_RegionCodeError struct {
	// The reasons for the region code error
	RegionCodeError RegionCodeErrorEnum_RegionCodeError `protobuf:"varint,51,opt,name=region_code_error,json=regionCodeError,proto3,enum=google.ads.googleads.v8.errors.RegionCodeErrorEnum_RegionCodeError,oneof"`
}
type ErrorCode_SettingError struct {
	// The reasons for the setting error
	SettingError SettingErrorEnum_SettingError `protobuf:"varint,52,opt,name=setting_error,json=settingError,proto3,enum=google.ads.googleads.v8.errors.SettingErrorEnum_SettingError,oneof"`
}
type ErrorCode_StringFormatError struct {
	// The reasons for the string format error
	StringFormatError StringFormatErrorEnum_StringFormatError `protobuf:"varint,53,opt,name=string_format_error,json=stringFormatError,proto3,enum=google.ads.googleads.v8.errors.StringFormatErrorEnum_StringFormatError,oneof"`
}
type ErrorCode_StringLengthError struct {
	// The reasons for the string length error
	StringLengthError StringLengthErrorEnum_StringLengthError `protobuf:"varint,54,opt,name=string_length_error,json=stringLengthError,proto3,enum=google.ads.googleads.v8.errors.StringLengthErrorEnum_StringLengthError,oneof"`
}
type ErrorCode_OperationAccessDeniedError struct {
	// The reasons for the operation access denied error
	OperationAccessDeniedError OperationAccessDeniedErrorEnum_OperationAccessDeniedError `protobuf:"varint,55,opt,name=operation_access_denied_error,json=operationAccessDeniedError,proto3,enum=google.ads.googleads.v8.errors.OperationAccessDeniedErrorEnum_OperationAccessDeniedError,oneof"`
}
type ErrorCode_ResourceAccessDeniedError struct {
	// The reasons for the resource access denied error
	ResourceAccessDeniedError ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError `protobuf:"varint,56,opt,name=resource_access_denied_error,json=resourceAccessDeniedError,proto3,enum=google.ads.googleads.v8.errors.ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError,oneof"`
}
type ErrorCode_ResourceCountLimitExceededError struct {
	// The reasons for the resource count limit exceeded error
	ResourceCountLimitExceededError ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError `protobuf:"varint,57,opt,name=resource_count_limit_exceeded_error,json=resourceCountLimitExceededError,proto3,enum=google.ads.googleads.v8.errors.ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError,oneof"`
}
type ErrorCode_YoutubeVideoRegistrationError struct {
	// The reasons for YouTube video registration errors.
	YoutubeVideoRegistrationError YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError `protobuf:"varint,117,opt,name=youtube_video_registration_error,json=youtubeVideoRegistrationError,proto3,enum=google.ads.googleads.v8.errors.YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError,oneof"`
}
type ErrorCode_AdGroupBidModifierError struct {
	// The reasons for the ad group bid modifier error
	AdGroupBidModifierError AdGroupBidModifierErrorEnum_AdGroupBidModifierError `protobuf:"varint,59,opt,name=ad_group_bid_modifier_error,json=adGroupBidModifierError,proto3,enum=google.ads.googleads.v8.errors.AdGroupBidModifierErrorEnum_AdGroupBidModifierError,oneof"`
}
type ErrorCode_ContextError struct {
// The reasons for the context error
ContextError ContextErrorEnum_ContextError `protobuf:"varint,60,opt,name=context_error,json=contextError,proto3,enum=google.ads.googleads.v8.errors.ContextErrorEnum_ContextError,oneof"`
}
type ErrorCode_FieldError struct {
// The reasons for the field error
FieldError FieldErrorEnum_FieldError `protobuf:"varint,61,opt,name=field_error,json=fieldError,proto3,enum=google.ads.googleads.v8.errors.FieldErrorEnum_FieldError,oneof"`
}
type ErrorCode_SharedSetError struct {
// The reasons for the shared set error
SharedSetError SharedSetErrorEnum_SharedSetError `protobuf:"varint,62,opt,name=shared_set_error,json=sharedSetError,proto3,enum=google.ads.googleads.v8.errors.SharedSetErrorEnum_SharedSetError,oneof"`
}
type ErrorCode_SharedCriterionError struct {
// The reasons for the shared criterion error
SharedCriterionError SharedCriterionErrorEnum_SharedCriterionError `protobuf:"varint,63,opt,name=shared_criterion_error,json=sharedCriterionError,proto3,enum=google.ads.googleads.v8.errors.SharedCriterionErrorEnum_SharedCriterionError,oneof"`
}
type ErrorCode_CampaignSharedSetError struct {
// The reasons for the campaign shared set error
CampaignSharedSetError CampaignSharedSetErrorEnum_CampaignSharedSetError `protobuf:"varint,64,opt,name=campaign_shared_set_error,json=campaignSharedSetError,proto3,enum=google.ads.googleads.v8.errors.CampaignSharedSetErrorEnum_CampaignSharedSetError,oneof"`
}
type ErrorCode_ConversionActionError struct {
// The reasons for the conversion action error
ConversionActionError ConversionActionErrorEnum_ConversionActionError `protobuf:"varint,65,opt,name=conversion_action_error,json=conversionActionError,proto3,enum=google.ads.googleads.v8.errors.ConversionActionErrorEnum_ConversionActionError,oneof"`
}
type ErrorCode_ConversionAdjustmentUploadError struct {
// The reasons for the conversion adjustment upload error
ConversionAdjustmentUploadError ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError `protobuf:"varint,115,opt,name=conversion_adjustment_upload_error,json=conversionAdjustmentUploadError,proto3,enum=google.ads.googleads.v8.errors.ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError,oneof"`
}
type ErrorCode_ConversionCustomVariableError struct {
// The reasons for the conversion custom variable error
ConversionCustomVariableError ConversionCustomVariableErrorEnum_ConversionCustomVariableError `protobuf:"varint,143,opt,name=conversion_custom_variable_error,json=conversionCustomVariableError,proto3,enum=google.ads.googleads.v8.errors.ConversionCustomVariableErrorEnum_ConversionCustomVariableError,oneof"`
}
type ErrorCode_ConversionUploadError struct {
// The reasons for the conversion upload error
ConversionUploadError ConversionUploadErrorEnum_ConversionUploadError `protobuf:"varint,111,opt,name=conversion_upload_error,json=conversionUploadError,proto3,enum=google.ads.googleads.v8.errors.ConversionUploadErrorEnum_ConversionUploadError,oneof"`
}
type ErrorCode_ConversionValueRuleError struct {
// The reasons for the conversion value rule error
ConversionValueRuleError ConversionValueRuleErrorEnum_ConversionValueRuleError `protobuf:"varint,145,opt,name=conversion_value_rule_error,json=conversionValueRuleError,proto3,enum=google.ads.googleads.v8.errors.ConversionValueRuleErrorEnum_ConversionValueRuleError,oneof"`
}
type ErrorCode_ConversionValueRuleSetError struct {
// The reasons for the conversion value rule set error
ConversionValueRuleSetError ConversionValueRuleSetErrorEnum_ConversionValueRuleSetError `protobuf:"varint,146,opt,name=conversion_value_rule_set_error,json=conversionValueRuleSetError,proto3,enum=google.ads.googleads.v8.errors.ConversionValueRuleSetErrorEnum_ConversionValueRuleSetError,oneof"`
}
type ErrorCode_HeaderError struct {
// The reasons for the header error.
HeaderError HeaderErrorEnum_HeaderError `protobuf:"varint,66,opt,name=header_error,json=headerError,proto3,enum=google.ads.googleads.v8.errors.HeaderErrorEnum_HeaderError,oneof"`
}
type ErrorCode_DatabaseError struct {
// The reasons for the database error.
DatabaseError DatabaseErrorEnum_DatabaseError `protobuf:"varint,67,opt,name=database_error,json=databaseError,proto3,enum=google.ads.googleads.v8.errors.DatabaseErrorEnum_DatabaseError,oneof"`
}
type ErrorCode_PolicyFindingError struct {
// The reasons for the policy finding error.
PolicyFindingError PolicyFindingErrorEnum_PolicyFindingError `protobuf:"varint,68,opt,name=policy_finding_error,json=policyFindingError,proto3,enum=google.ads.googleads.v8.errors.PolicyFindingErrorEnum_PolicyFindingError,oneof"`
}
type ErrorCode_EnumError struct {
// The reason for enum error.
EnumError EnumErrorEnum_EnumError `protobuf:"varint,70,opt,name=enum_error,json=enumError,proto3,enum=google.ads.googleads.v8.errors.EnumErrorEnum_EnumError,oneof"`
}
type ErrorCode_KeywordPlanError struct {
// The reason for keyword plan error.
KeywordPlanError KeywordPlanErrorEnum_KeywordPlanError `protobuf:"varint,71,opt,name=keyword_plan_error,json=keywordPlanError,proto3,enum=google.ads.googleads.v8.errors.KeywordPlanErrorEnum_KeywordPlanError,oneof"`
}
type ErrorCode_KeywordPlanCampaignError struct {
// The reason for keyword plan campaign error.
KeywordPlanCampaignError KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError `protobuf:"varint,72,opt,name=keyword_plan_campaign_error,json=keywordPlanCampaignError,proto3,enum=google.ads.googleads.v8.errors.KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError,oneof"`
}
type ErrorCode_KeywordPlanCampaignKeywordError struct {
// The reason for keyword plan campaign keyword error.
KeywordPlanCampaignKeywordError KeywordPlanCampaignKeywordErrorEnum_KeywordPlanCampaignKeywordError `protobuf:"varint,132,opt,name=keyword_plan_campaign_keyword_error,json=keywordPlanCampaignKeywordError,proto3,enum=google.ads.googleads.v8.errors.KeywordPlanCampaignKeywordErrorEnum_KeywordPlanCampaignKeywordError,oneof"`
}
type ErrorCode_KeywordPlanAdGroupError struct {
// The reason for keyword plan ad group error.
KeywordPlanAdGroupError KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError `protobuf:"varint,74,opt,name=keyword_plan_ad_group_error,json=keywordPlanAdGroupError,proto3,enum=google.ads.googleads.v8.errors.KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError,oneof"`
}
type ErrorCode_KeywordPlanAdGroupKeywordError struct {
// The reason for keyword plan ad group keyword error.
KeywordPlanAdGroupKeywordError KeywordPlanAdGroupKeywordErrorEnum_KeywordPlanAdGroupKeywordError `protobuf:"varint,133,opt,name=keyword_plan_ad_group_keyword_error,json=keywordPlanAdGroupKeywordError,proto3,enum=google.ads.googleads.v8.errors.KeywordPlanAdGroupKeywordErrorEnum_KeywordPlanAdGroupKeywordError,oneof"`
}
type ErrorCode_KeywordPlanIdeaError struct {
// The reason for keyword idea error.
KeywordPlanIdeaError KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError `protobuf:"varint,76,opt,name=keyword_plan_idea_error,json=keywordPlanIdeaError,proto3,enum=google.ads.googleads.v8.errors.KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError,oneof"`
}
type ErrorCode_AccountBudgetProposalError struct {
// The reasons for account budget proposal errors.
AccountBudgetProposalError AccountBudgetProposalErrorEnum_AccountBudgetProposalError `protobuf:"varint,77,opt,name=account_budget_proposal_error,json=accountBudgetProposalError,proto3,enum=google.ads.googleads.v8.errors.AccountBudgetProposalErrorEnum_AccountBudgetProposalError,oneof"`
}
type ErrorCode_UserListError struct {
// The reasons for the user list error
UserListError UserListErrorEnum_UserListError `protobuf:"varint,78,opt,name=user_list_error,json=userListError,proto3,enum=google.ads.googleads.v8.errors.UserListErrorEnum_UserListError,oneof"`
}
type ErrorCode_ChangeEventError struct {
// The reasons for the change event error
ChangeEventError ChangeEventErrorEnum_ChangeEventError `protobuf:"varint,136,opt,name=change_event_error,json=changeEventError,proto3,enum=google.ads.googleads.v8.errors.ChangeEventErrorEnum_ChangeEventError,oneof"`
}
type ErrorCode_ChangeStatusError struct {
// The reasons for the change status error
ChangeStatusError ChangeStatusErrorEnum_ChangeStatusError `protobuf:"varint,79,opt,name=change_status_error,json=changeStatusError,proto3,enum=google.ads.googleads.v8.errors.ChangeStatusErrorEnum_ChangeStatusError,oneof"`
}
type ErrorCode_FeedError struct {
// The reasons for the feed error
FeedError FeedErrorEnum_FeedError `protobuf:"varint,80,opt,name=feed_error,json=feedError,proto3,enum=google.ads.googleads.v8.errors.FeedErrorEnum_FeedError,oneof"`
}
type ErrorCode_GeoTargetConstantSuggestionError struct {
// The reasons for the geo target constant suggestion error.
GeoTargetConstantSuggestionError GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError `protobuf:"varint,81,opt,name=geo_target_constant_suggestion_error,json=geoTargetConstantSuggestionError,proto3,enum=google.ads.googleads.v8.errors.GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError,oneof"`
}
type ErrorCode_CampaignDraftError struct {
// The reasons for the campaign draft error
CampaignDraftError CampaignDraftErrorEnum_CampaignDraftError `protobuf:"varint,82,opt,name=campaign_draft_error,json=campaignDraftError,proto3,enum=google.ads.googleads.v8.errors.CampaignDraftErrorEnum_CampaignDraftError,oneof"`
}
type ErrorCode_FeedItemError struct {
// The reasons for the feed item error
FeedItemError FeedItemErrorEnum_FeedItemError `protobuf:"varint,83,opt,name=feed_item_error,json=feedItemError,proto3,enum=google.ads.googleads.v8.errors.FeedItemErrorEnum_FeedItemError,oneof"`
}
type ErrorCode_LabelError struct {
// The reason for the label error.
LabelError LabelErrorEnum_LabelError `protobuf:"varint,84,opt,name=label_error,json=labelError,proto3,enum=google.ads.googleads.v8.errors.LabelErrorEnum_LabelError,oneof"`
}
type ErrorCode_BillingSetupError struct {
// The reasons for the billing setup error
BillingSetupError BillingSetupErrorEnum_BillingSetupError `protobuf:"varint,87,opt,name=billing_setup_error,json=billingSetupError,proto3,enum=google.ads.googleads.v8.errors.BillingSetupErrorEnum_BillingSetupError,oneof"`
}
type ErrorCode_CustomerClientLinkError struct {
// The reasons for the customer client link error
CustomerClientLinkError CustomerClientLinkErrorEnum_CustomerClientLinkError `protobuf:"varint,88,opt,name=customer_client_link_error,json=customerClientLinkError,proto3,enum=google.ads.googleads.v8.errors.CustomerClientLinkErrorEnum_CustomerClientLinkError,oneof"`
}
type ErrorCode_CustomerManagerLinkError struct {
// The reasons for the customer manager link error
CustomerManagerLinkError CustomerManagerLinkErrorEnum_CustomerManagerLinkError `protobuf:"varint,91,opt,name=customer_manager_link_error,json=customerManagerLinkError,proto3,enum=google.ads.googleads.v8.errors.CustomerManagerLinkErrorEnum_CustomerManagerLinkError,oneof"`
}
type ErrorCode_FeedMappingError struct {
// The reasons for the feed mapping error
FeedMappingError FeedMappingErrorEnum_FeedMappingError `protobuf:"varint,92,opt,name=feed_mapping_error,json=feedMappingError,proto3,enum=google.ads.googleads.v8.errors.FeedMappingErrorEnum_FeedMappingError,oneof"`
}
type ErrorCode_CustomerFeedError struct {
// The reasons for the customer feed error
CustomerFeedError CustomerFeedErrorEnum_CustomerFeedError `protobuf:"varint,93,opt,name=customer_feed_error,json=customerFeedError,proto3,enum=google.ads.googleads.v8.errors.CustomerFeedErrorEnum_CustomerFeedError,oneof"`
}
type ErrorCode_AdGroupFeedError struct {
// The reasons for the ad group feed error
AdGroupFeedError AdGroupFeedErrorEnum_AdGroupFeedError `protobuf:"varint,94,opt,name=ad_group_feed_error,json=adGroupFeedError,proto3,enum=google.ads.googleads.v8.errors.AdGroupFeedErrorEnum_AdGroupFeedError,oneof"`
}
type ErrorCode_CampaignFeedError struct {
// The reasons for the campaign feed error
CampaignFeedError CampaignFeedErrorEnum_CampaignFeedError `protobuf:"varint,96,opt,name=campaign_feed_error,json=campaignFeedError,proto3,enum=google.ads.googleads.v8.errors.CampaignFeedErrorEnum_CampaignFeedError,oneof"`
}
type ErrorCode_CustomInterestError struct {
// The reasons for the custom interest error
CustomInterestError CustomInterestErrorEnum_CustomInterestError `protobuf:"varint,97,opt,name=custom_interest_error,json=customInterestError,proto3,enum=google.ads.googleads.v8.errors.CustomInterestErrorEnum_CustomInterestError,oneof"`
}
type ErrorCode_CampaignExperimentError struct {
// The reasons for the campaign experiment error
CampaignExperimentError CampaignExperimentErrorEnum_CampaignExperimentError `protobuf:"varint,98,opt,name=campaign_experiment_error,json=campaignExperimentError,proto3,enum=google.ads.googleads.v8.errors.CampaignExperimentErrorEnum_CampaignExperimentError,oneof"`
}
type ErrorCode_ExtensionFeedItemError struct {
// The reasons for the extension feed item error
ExtensionFeedItemError ExtensionFeedItemErrorEnum_ExtensionFeedItemError `protobuf:"varint,100,opt,name=extension_feed_item_error,json=extensionFeedItemError,proto3,enum=google.ads.googleads.v8.errors.ExtensionFeedItemErrorEnum_ExtensionFeedItemError,oneof"`
}
type ErrorCode_AdParameterError struct {
// The reasons for the ad parameter error
AdParameterError AdParameterErrorEnum_AdParameterError `protobuf:"varint,101,opt,name=ad_parameter_error,json=adParameterError,proto3,enum=google.ads.googleads.v8.errors.AdParameterErrorEnum_AdParameterError,oneof"`
}
type ErrorCode_FeedItemValidationError struct {
// The reasons for the feed item validation error
FeedItemValidationError FeedItemValidationErrorEnum_FeedItemValidationError `protobuf:"varint,102,opt,name=feed_item_validation_error,json=feedItemValidationError,proto3,enum=google.ads.googleads.v8.errors.FeedItemValidationErrorEnum_FeedItemValidationError,oneof"`
}
type ErrorCode_ExtensionSettingError struct {
// The reasons for the extension setting error
ExtensionSettingError ExtensionSettingErrorEnum_ExtensionSettingError `protobuf:"varint,103,opt,name=extension_setting_error,json=extensionSettingError,proto3,enum=google.ads.googleads.v8.errors.ExtensionSettingErrorEnum_ExtensionSettingError,oneof"`
}
type ErrorCode_FeedItemSetError struct {
// The reasons for the feed item set error
FeedItemSetError FeedItemSetErrorEnum_FeedItemSetError `protobuf:"varint,140,opt,name=feed_item_set_error,json=feedItemSetError,proto3,enum=google.ads.googleads.v8.errors.FeedItemSetErrorEnum_FeedItemSetError,oneof"`
}
type ErrorCode_FeedItemSetLinkError struct {
// The reasons for the feed item set link error
FeedItemSetLinkError FeedItemSetLinkErrorEnum_FeedItemSetLinkError `protobuf:"varint,141,opt,name=feed_item_set_link_error,json=feedItemSetLinkError,proto3,enum=google.ads.googleads.v8.errors.FeedItemSetLinkErrorEnum_FeedItemSetLinkError,oneof"`
}
type ErrorCode_FeedItemTargetError struct {
// The reasons for the feed item target error
FeedItemTargetError FeedItemTargetErrorEnum_FeedItemTargetError `protobuf:"varint,104,opt,name=feed_item_target_error,json=feedItemTargetError,proto3,enum=google.ads.googleads.v8.errors.FeedItemTargetErrorEnum_FeedItemTargetError,oneof"`
}
type ErrorCode_PolicyViolationError struct {
// The reasons for the policy violation error
PolicyViolationError PolicyViolationErrorEnum_PolicyViolationError `protobuf:"varint,105,opt,name=policy_violation_error,json=policyViolationError,proto3,enum=google.ads.googleads.v8.errors.PolicyViolationErrorEnum_PolicyViolationError,oneof"`
}
type ErrorCode_PartialFailureError struct {
// The reasons for the mutate job error
PartialFailureError PartialFailureErrorEnum_PartialFailureError `protobuf:"varint,112,opt,name=partial_failure_error,json=partialFailureError,proto3,enum=google.ads.googleads.v8.errors.PartialFailureErrorEnum_PartialFailureError,oneof"`
}
type ErrorCode_PolicyValidationParameterError struct {
// The reasons for the policy validation parameter error
PolicyValidationParameterError PolicyValidationParameterErrorEnum_PolicyValidationParameterError `protobuf:"varint,114,opt,name=policy_validation_parameter_error,json=policyValidationParameterError,proto3,enum=google.ads.googleads.v8.errors.PolicyValidationParameterErrorEnum_PolicyValidationParameterError,oneof"`
}
type ErrorCode_SizeLimitError struct {
// The reasons for the size limit error
SizeLimitError SizeLimitErrorEnum_SizeLimitError `protobuf:"varint,118,opt,name=size_limit_error,json=sizeLimitError,proto3,enum=google.ads.googleads.v8.errors.SizeLimitErrorEnum_SizeLimitError,oneof"`
}
type ErrorCode_OfflineUserDataJobError struct {
// The reasons for the offline user data job error.
OfflineUserDataJobError OfflineUserDataJobErrorEnum_OfflineUserDataJobError `protobuf:"varint,119,opt,name=offline_user_data_job_error,json=offlineUserDataJobError,proto3,enum=google.ads.googleads.v8.errors.OfflineUserDataJobErrorEnum_OfflineUserDataJobError,oneof"`
}
type ErrorCode_NotAllowlistedError struct {
// The reasons for the not allowlisted error
NotAllowlistedError NotAllowlistedErrorEnum_NotAllowlistedError `protobuf:"varint,137,opt,name=not_allowlisted_error,json=notAllowlistedError,proto3,enum=google.ads.googleads.v8.errors.NotAllowlistedErrorEnum_NotAllowlistedError,oneof"`
}
type ErrorCode_ManagerLinkError struct {
// The reasons for the manager link error
ManagerLinkError ManagerLinkErrorEnum_ManagerLinkError `protobuf:"varint,121,opt,name=manager_link_error,json=managerLinkError,proto3,enum=google.ads.googleads.v8.errors.ManagerLinkErrorEnum_ManagerLinkError,oneof"`
}
type ErrorCode_CurrencyCodeError struct {
// The reasons for the currency code error
CurrencyCodeError CurrencyCodeErrorEnum_CurrencyCodeError `protobuf:"varint,122,opt,name=currency_code_error,json=currencyCodeError,proto3,enum=google.ads.googleads.v8.errors.CurrencyCodeErrorEnum_CurrencyCodeError,oneof"`
}
type ErrorCode_AccessInvitationError struct {
// The reasons for the access invitation error
AccessInvitationError AccessInvitationErrorEnum_AccessInvitationError `protobuf:"varint,124,opt,name=access_invitation_error,json=accessInvitationError,proto3,enum=google.ads.googleads.v8.errors.AccessInvitationErrorEnum_AccessInvitationError,oneof"`
}
type ErrorCode_ReachPlanError struct {
// The reasons for the reach plan error
ReachPlanError ReachPlanErrorEnum_ReachPlanError `protobuf:"varint,125,opt,name=reach_plan_error,json=reachPlanError,proto3,enum=google.ads.googleads.v8.errors.ReachPlanErrorEnum_ReachPlanError,oneof"`
}
type ErrorCode_InvoiceError struct {
// The reasons for the invoice error
InvoiceError InvoiceErrorEnum_InvoiceError `protobuf:"varint,126,opt,name=invoice_error,json=invoiceError,proto3,enum=google.ads.googleads.v8.errors.InvoiceErrorEnum_InvoiceError,oneof"`
}
type ErrorCode_PaymentsAccountError struct {
// The reasons for errors in payments accounts service
PaymentsAccountError PaymentsAccountErrorEnum_PaymentsAccountError `protobuf:"varint,127,opt,name=payments_account_error,json=paymentsAccountError,proto3,enum=google.ads.googleads.v8.errors.PaymentsAccountErrorEnum_PaymentsAccountError,oneof"`
}
type ErrorCode_TimeZoneError struct {
// The reasons for the time zone error
TimeZoneError TimeZoneErrorEnum_TimeZoneError `protobuf:"varint,128,opt,name=time_zone_error,json=timeZoneError,proto3,enum=google.ads.googleads.v8.errors.TimeZoneErrorEnum_TimeZoneError,oneof"`
}
type ErrorCode_AssetLinkError struct {
// The reasons for the asset link error
AssetLinkError AssetLinkErrorEnum_AssetLinkError `protobuf:"varint,129,opt,name=asset_link_error,json=assetLinkError,proto3,enum=google.ads.googleads.v8.errors.AssetLinkErrorEnum_AssetLinkError,oneof"`
}
type ErrorCode_UserDataError struct {
// The reasons for the user data error.
UserDataError UserDataErrorEnum_UserDataError `protobuf:"varint,130,opt,name=user_data_error,json=userDataError,proto3,enum=google.ads.googleads.v8.errors.UserDataErrorEnum_UserDataError,oneof"`
}
type ErrorCode_BatchJobError struct {
// The reasons for the batch job error
BatchJobError BatchJobErrorEnum_BatchJobError `protobuf:"varint,131,opt,name=batch_job_error,json=batchJobError,proto3,enum=google.ads.googleads.v8.errors.BatchJobErrorEnum_BatchJobError,oneof"`
}
type ErrorCode_AccountLinkError struct {
// The reasons for the account link status change error
AccountLinkError AccountLinkErrorEnum_AccountLinkError `protobuf:"varint,134,opt,name=account_link_error,json=accountLinkError,proto3,enum=google.ads.googleads.v8.errors.AccountLinkErrorEnum_AccountLinkError,oneof"`
}
type ErrorCode_ThirdPartyAppAnalyticsLinkError struct {
// The reasons for the third party app analytics link mutate error
ThirdPartyAppAnalyticsLinkError ThirdPartyAppAnalyticsLinkErrorEnum_ThirdPartyAppAnalyticsLinkError `protobuf:"varint,135,opt,name=third_party_app_analytics_link_error,json=thirdPartyAppAnalyticsLinkError,proto3,enum=google.ads.googleads.v8.errors.ThirdPartyAppAnalyticsLinkErrorEnum_ThirdPartyAppAnalyticsLinkError,oneof"`
}
type ErrorCode_CustomerUserAccessError struct {
// The reasons for the customer user access mutate error
CustomerUserAccessError CustomerUserAccessErrorEnum_CustomerUserAccessError `protobuf:"varint,138,opt,name=customer_user_access_error,json=customerUserAccessError,proto3,enum=google.ads.googleads.v8.errors.CustomerUserAccessErrorEnum_CustomerUserAccessError,oneof"`
}
type ErrorCode_CustomAudienceError struct {
// The reasons for the custom audience error
CustomAudienceError CustomAudienceErrorEnum_CustomAudienceError `protobuf:"varint,139,opt,name=custom_audience_error,json=customAudienceError,proto3,enum=google.ads.googleads.v8.errors.CustomAudienceErrorEnum_CustomAudienceError,oneof"`
}
// isErrorCode_ErrorCode is the unexported marker method that admits each
// wrapper type above into the ErrorCode.ErrorCode oneof interface. The
// bodies are intentionally empty: implementing the method is the whole
// contract. Generated code — one implementation per oneof case.
func (*ErrorCode_RequestError) isErrorCode_ErrorCode() {}
func (*ErrorCode_BiddingStrategyError) isErrorCode_ErrorCode() {}
func (*ErrorCode_UrlFieldError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ListOperationError) isErrorCode_ErrorCode() {}
func (*ErrorCode_QueryError) isErrorCode_ErrorCode() {}
func (*ErrorCode_MutateError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FieldMaskError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AuthorizationError) isErrorCode_ErrorCode() {}
func (*ErrorCode_InternalError) isErrorCode_ErrorCode() {}
func (*ErrorCode_QuotaError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdGroupError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CampaignBudgetError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CampaignError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AuthenticationError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdGroupCriterionError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdCustomizerError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdGroupAdError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdSharingError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdxError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AssetError) isErrorCode_ErrorCode() {}
func (*ErrorCode_BiddingError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CampaignCriterionError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CollectionSizeError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CountryCodeError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CriterionError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CustomerError) isErrorCode_ErrorCode() {}
func (*ErrorCode_DateError) isErrorCode_ErrorCode() {}
func (*ErrorCode_DateRangeError) isErrorCode_ErrorCode() {}
func (*ErrorCode_DistinctError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FeedAttributeReferenceError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FunctionError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FunctionParsingError) isErrorCode_ErrorCode() {}
func (*ErrorCode_IdError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ImageError) isErrorCode_ErrorCode() {}
func (*ErrorCode_LanguageCodeError) isErrorCode_ErrorCode() {}
func (*ErrorCode_MediaBundleError) isErrorCode_ErrorCode() {}
func (*ErrorCode_MediaUploadError) isErrorCode_ErrorCode() {}
func (*ErrorCode_MediaFileError) isErrorCode_ErrorCode() {}
func (*ErrorCode_MultiplierError) isErrorCode_ErrorCode() {}
func (*ErrorCode_NewResourceCreationError) isErrorCode_ErrorCode() {}
func (*ErrorCode_NotEmptyError) isErrorCode_ErrorCode() {}
func (*ErrorCode_NullError) isErrorCode_ErrorCode() {}
func (*ErrorCode_OperatorError) isErrorCode_ErrorCode() {}
func (*ErrorCode_RangeError) isErrorCode_ErrorCode() {}
func (*ErrorCode_RecommendationError) isErrorCode_ErrorCode() {}
func (*ErrorCode_RegionCodeError) isErrorCode_ErrorCode() {}
func (*ErrorCode_SettingError) isErrorCode_ErrorCode() {}
func (*ErrorCode_StringFormatError) isErrorCode_ErrorCode() {}
func (*ErrorCode_StringLengthError) isErrorCode_ErrorCode() {}
func (*ErrorCode_OperationAccessDeniedError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ResourceAccessDeniedError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ResourceCountLimitExceededError) isErrorCode_ErrorCode() {}
func (*ErrorCode_YoutubeVideoRegistrationError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdGroupBidModifierError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ContextError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FieldError) isErrorCode_ErrorCode() {}
func (*ErrorCode_SharedSetError) isErrorCode_ErrorCode() {}
func (*ErrorCode_SharedCriterionError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CampaignSharedSetError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ConversionActionError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ConversionAdjustmentUploadError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ConversionCustomVariableError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ConversionUploadError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ConversionValueRuleError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ConversionValueRuleSetError) isErrorCode_ErrorCode() {}
func (*ErrorCode_HeaderError) isErrorCode_ErrorCode() {}
func (*ErrorCode_DatabaseError) isErrorCode_ErrorCode() {}
func (*ErrorCode_PolicyFindingError) isErrorCode_ErrorCode() {}
func (*ErrorCode_EnumError) isErrorCode_ErrorCode() {}
func (*ErrorCode_KeywordPlanError) isErrorCode_ErrorCode() {}
func (*ErrorCode_KeywordPlanCampaignError) isErrorCode_ErrorCode() {}
func (*ErrorCode_KeywordPlanCampaignKeywordError) isErrorCode_ErrorCode() {}
func (*ErrorCode_KeywordPlanAdGroupError) isErrorCode_ErrorCode() {}
func (*ErrorCode_KeywordPlanAdGroupKeywordError) isErrorCode_ErrorCode() {}
func (*ErrorCode_KeywordPlanIdeaError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AccountBudgetProposalError) isErrorCode_ErrorCode() {}
func (*ErrorCode_UserListError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ChangeEventError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ChangeStatusError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FeedError) isErrorCode_ErrorCode() {}
func (*ErrorCode_GeoTargetConstantSuggestionError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CampaignDraftError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FeedItemError) isErrorCode_ErrorCode() {}
func (*ErrorCode_LabelError) isErrorCode_ErrorCode() {}
func (*ErrorCode_BillingSetupError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CustomerClientLinkError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CustomerManagerLinkError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FeedMappingError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CustomerFeedError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdGroupFeedError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CampaignFeedError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CustomInterestError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CampaignExperimentError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ExtensionFeedItemError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AdParameterError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FeedItemValidationError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ExtensionSettingError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FeedItemSetError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FeedItemSetLinkError) isErrorCode_ErrorCode() {}
func (*ErrorCode_FeedItemTargetError) isErrorCode_ErrorCode() {}
func (*ErrorCode_PolicyViolationError) isErrorCode_ErrorCode() {}
func (*ErrorCode_PartialFailureError) isErrorCode_ErrorCode() {}
func (*ErrorCode_PolicyValidationParameterError) isErrorCode_ErrorCode() {}
func (*ErrorCode_SizeLimitError) isErrorCode_ErrorCode() {}
func (*ErrorCode_OfflineUserDataJobError) isErrorCode_ErrorCode() {}
func (*ErrorCode_NotAllowlistedError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ManagerLinkError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CurrencyCodeError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AccessInvitationError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ReachPlanError) isErrorCode_ErrorCode() {}
func (*ErrorCode_InvoiceError) isErrorCode_ErrorCode() {}
func (*ErrorCode_PaymentsAccountError) isErrorCode_ErrorCode() {}
func (*ErrorCode_TimeZoneError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AssetLinkError) isErrorCode_ErrorCode() {}
func (*ErrorCode_UserDataError) isErrorCode_ErrorCode() {}
func (*ErrorCode_BatchJobError) isErrorCode_ErrorCode() {}
func (*ErrorCode_AccountLinkError) isErrorCode_ErrorCode() {}
func (*ErrorCode_ThirdPartyAppAnalyticsLinkError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CustomerUserAccessError) isErrorCode_ErrorCode() {}
func (*ErrorCode_CustomAudienceError) isErrorCode_ErrorCode() {}
// Describes the part of the request proto that caused the error.
type ErrorLocation struct {
// protoimpl bookkeeping managed by the generated runtime; not for direct use.
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// A field path that indicates which field was invalid in the request.
FieldPathElements []*ErrorLocation_FieldPathElement `protobuf:"bytes,2,rep,name=field_path_elements,json=fieldPathElements,proto3" json:"field_path_elements,omitempty"`
}
// Reset clears the message to its zero value. When the protoimpl unsafe
// fast path is enabled, the cleared value is re-associated with its
// message-info slot so reflection keeps working on the reused struct.
func (x *ErrorLocation) Reset() {
	*x = ErrorLocation{}
	if !protoimpl.UnsafeEnabled {
		return
	}
	msgInfo := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[3]
	state := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
	state.StoreMessageInfo(msgInfo)
}
func (x *ErrorLocation) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ErrorLocation) ProtoMessage() {}
func (x *ErrorLocation) ProtoReflect() protoreflect.Message {
mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ErrorLocation.ProtoReflect.Descriptor instead.
func (*ErrorLocation) Descriptor() ([]byte, []int) {
return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{3}
}
func (x *ErrorLocation) GetFieldPathElements() []*ErrorLocation_FieldPathElement {
if x != nil {
return x.FieldPathElements
}
return nil
}
// Additional error details.
type ErrorDetails struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The error code that should have been returned, but wasn't. This is used
	// when the error code is not published in the client specified version.
	UnpublishedErrorCode string `protobuf:"bytes,1,opt,name=unpublished_error_code,json=unpublishedErrorCode,proto3" json:"unpublished_error_code,omitempty"`
	// Describes an ad policy violation.
	PolicyViolationDetails *PolicyViolationDetails `protobuf:"bytes,2,opt,name=policy_violation_details,json=policyViolationDetails,proto3" json:"policy_violation_details,omitempty"`
	// Describes policy violation findings.
	PolicyFindingDetails *PolicyFindingDetails `protobuf:"bytes,3,opt,name=policy_finding_details,json=policyFindingDetails,proto3" json:"policy_finding_details,omitempty"`
	// Details on the quota error, including the scope (account or developer), the
	// rate bucket name and the retry delay.
	QuotaErrorDetails *QuotaErrorDetails `protobuf:"bytes,4,opt,name=quota_error_details,json=quotaErrorDetails,proto3" json:"quota_error_details,omitempty"`
	// Details for a resource count limit exceeded error.
	ResourceCountDetails *ResourceCountDetails `protobuf:"bytes,5,opt,name=resource_count_details,json=resourceCountDetails,proto3" json:"resource_count_details,omitempty"`
}

// Reset restores x to its zero value; under the unsafe fast path it also
// re-stores the cached message info for this type.
func (x *ErrorDetails) Reset() {
	*x = ErrorDetails{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf runtime's text formatter.
func (x *ErrorDetails) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *ErrorDetails as a protobuf message.
func (*ErrorDetails) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use under the unsafe fast path.
func (x *ErrorDetails) ProtoReflect() protoreflect.Message {
	mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ErrorDetails.ProtoReflect.Descriptor instead.
func (*ErrorDetails) Descriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{4}
}

// The getters below are nil-receiver safe and return the field's zero value
// when x is nil, per the standard protoc-gen-go accessor contract.

func (x *ErrorDetails) GetUnpublishedErrorCode() string {
	if x != nil {
		return x.UnpublishedErrorCode
	}
	return ""
}

func (x *ErrorDetails) GetPolicyViolationDetails() *PolicyViolationDetails {
	if x != nil {
		return x.PolicyViolationDetails
	}
	return nil
}

func (x *ErrorDetails) GetPolicyFindingDetails() *PolicyFindingDetails {
	if x != nil {
		return x.PolicyFindingDetails
	}
	return nil
}

func (x *ErrorDetails) GetQuotaErrorDetails() *QuotaErrorDetails {
	if x != nil {
		return x.QuotaErrorDetails
	}
	return nil
}

func (x *ErrorDetails) GetResourceCountDetails() *ResourceCountDetails {
	if x != nil {
		return x.ResourceCountDetails
	}
	return nil
}
// Error returned as part of a mutate response.
// This error indicates single policy violation by some text
// in one of the fields.
type PolicyViolationDetails struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Human readable description of policy violation.
	ExternalPolicyDescription string `protobuf:"bytes,2,opt,name=external_policy_description,json=externalPolicyDescription,proto3" json:"external_policy_description,omitempty"`
	// Unique identifier for this violation.
	// If policy is exemptible, this key may be used to request exemption.
	Key *common.PolicyViolationKey `protobuf:"bytes,4,opt,name=key,proto3" json:"key,omitempty"`
	// Human readable name of the policy.
	ExternalPolicyName string `protobuf:"bytes,5,opt,name=external_policy_name,json=externalPolicyName,proto3" json:"external_policy_name,omitempty"`
	// Whether user can file an exemption request for this violation.
	IsExemptible bool `protobuf:"varint,6,opt,name=is_exemptible,json=isExemptible,proto3" json:"is_exemptible,omitempty"`
}

// Reset restores x to its zero value; under the unsafe fast path it also
// re-stores the cached message info for this type.
func (x *PolicyViolationDetails) Reset() {
	*x = PolicyViolationDetails{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf runtime's text formatter.
func (x *PolicyViolationDetails) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *PolicyViolationDetails as a protobuf message.
func (*PolicyViolationDetails) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use under the unsafe fast path.
func (x *PolicyViolationDetails) ProtoReflect() protoreflect.Message {
	mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PolicyViolationDetails.ProtoReflect.Descriptor instead.
func (*PolicyViolationDetails) Descriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{5}
}

// The getters below are nil-receiver safe and return the field's zero value
// when x is nil, per the standard protoc-gen-go accessor contract.

func (x *PolicyViolationDetails) GetExternalPolicyDescription() string {
	if x != nil {
		return x.ExternalPolicyDescription
	}
	return ""
}

func (x *PolicyViolationDetails) GetKey() *common.PolicyViolationKey {
	if x != nil {
		return x.Key
	}
	return nil
}

func (x *PolicyViolationDetails) GetExternalPolicyName() string {
	if x != nil {
		return x.ExternalPolicyName
	}
	return ""
}

func (x *PolicyViolationDetails) GetIsExemptible() bool {
	if x != nil {
		return x.IsExemptible
	}
	return false
}
// Error returned as part of a mutate response.
// This error indicates one or more policy findings in the fields of a
// resource.
type PolicyFindingDetails struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The list of policy topics for the resource. Contains the PROHIBITED or
	// FULLY_LIMITED policy topic entries that prevented the resource from being
	// saved (among any other entries the resource may also have).
	PolicyTopicEntries []*common.PolicyTopicEntry `protobuf:"bytes,1,rep,name=policy_topic_entries,json=policyTopicEntries,proto3" json:"policy_topic_entries,omitempty"`
}

// Reset restores x to its zero value; under the unsafe fast path it also
// re-stores the cached message info for this type.
func (x *PolicyFindingDetails) Reset() {
	*x = PolicyFindingDetails{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[6]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf runtime's text formatter.
func (x *PolicyFindingDetails) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *PolicyFindingDetails as a protobuf message.
func (*PolicyFindingDetails) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use under the unsafe fast path.
func (x *PolicyFindingDetails) ProtoReflect() protoreflect.Message {
	mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[6]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use PolicyFindingDetails.ProtoReflect.Descriptor instead.
func (*PolicyFindingDetails) Descriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{6}
}

// GetPolicyTopicEntries returns the policy topic entries; nil-receiver safe.
func (x *PolicyFindingDetails) GetPolicyTopicEntries() []*common.PolicyTopicEntry {
	if x != nil {
		return x.PolicyTopicEntries
	}
	return nil
}
// Additional quota error details when there is QuotaError.
type QuotaErrorDetails struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The rate scope of the quota limit.
	RateScope QuotaErrorDetails_QuotaRateScope `protobuf:"varint,1,opt,name=rate_scope,json=rateScope,proto3,enum=google.ads.googleads.v8.errors.QuotaErrorDetails_QuotaRateScope" json:"rate_scope,omitempty"`
	// The high level description of the quota bucket.
	// Examples are "Get requests for standard access" or "Requests per account".
	RateName string `protobuf:"bytes,2,opt,name=rate_name,json=rateName,proto3" json:"rate_name,omitempty"`
	// Backoff period that customers should wait before sending next request.
	RetryDelay *durationpb.Duration `protobuf:"bytes,3,opt,name=retry_delay,json=retryDelay,proto3" json:"retry_delay,omitempty"`
}

// Reset restores x to its zero value; under the unsafe fast path it also
// re-stores the cached message info for this type.
func (x *QuotaErrorDetails) Reset() {
	*x = QuotaErrorDetails{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[7]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf runtime's text formatter.
func (x *QuotaErrorDetails) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *QuotaErrorDetails as a protobuf message.
func (*QuotaErrorDetails) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use under the unsafe fast path.
func (x *QuotaErrorDetails) ProtoReflect() protoreflect.Message {
	mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[7]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use QuotaErrorDetails.ProtoReflect.Descriptor instead.
func (*QuotaErrorDetails) Descriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{7}
}

// The getters below are nil-receiver safe and return the field's zero value
// when x is nil, per the standard protoc-gen-go accessor contract.

func (x *QuotaErrorDetails) GetRateScope() QuotaErrorDetails_QuotaRateScope {
	if x != nil {
		return x.RateScope
	}
	return QuotaErrorDetails_UNSPECIFIED
}

func (x *QuotaErrorDetails) GetRateName() string {
	if x != nil {
		return x.RateName
	}
	return ""
}

func (x *QuotaErrorDetails) GetRetryDelay() *durationpb.Duration {
	if x != nil {
		return x.RetryDelay
	}
	return nil
}
// Error details returned when an resource count limit was exceeded.
type ResourceCountDetails struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The ID of the resource whose limit was exceeded.
	// External customer ID if the limit is for a customer.
	EnclosingId string `protobuf:"bytes,1,opt,name=enclosing_id,json=enclosingId,proto3" json:"enclosing_id,omitempty"`
	// The name of the resource (Customer, Campaign etc.) whose limit was
	// exceeded.
	EnclosingResource string `protobuf:"bytes,5,opt,name=enclosing_resource,json=enclosingResource,proto3" json:"enclosing_resource,omitempty"`
	// The limit which was exceeded.
	Limit int32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"`
	// The resource limit type which was exceeded.
	LimitType enums.ResourceLimitTypeEnum_ResourceLimitType `protobuf:"varint,3,opt,name=limit_type,json=limitType,proto3,enum=google.ads.googleads.v8.enums.ResourceLimitTypeEnum_ResourceLimitType" json:"limit_type,omitempty"`
	// The count of existing entities.
	ExistingCount int32 `protobuf:"varint,4,opt,name=existing_count,json=existingCount,proto3" json:"existing_count,omitempty"`
}

// Reset restores x to its zero value; under the unsafe fast path it also
// re-stores the cached message info for this type.
func (x *ResourceCountDetails) Reset() {
	*x = ResourceCountDetails{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf runtime's text formatter.
func (x *ResourceCountDetails) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *ResourceCountDetails as a protobuf message.
func (*ResourceCountDetails) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use under the unsafe fast path.
func (x *ResourceCountDetails) ProtoReflect() protoreflect.Message {
	mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ResourceCountDetails.ProtoReflect.Descriptor instead.
func (*ResourceCountDetails) Descriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{8}
}

// The getters below are nil-receiver safe and return the field's zero value
// when x is nil, per the standard protoc-gen-go accessor contract.

func (x *ResourceCountDetails) GetEnclosingId() string {
	if x != nil {
		return x.EnclosingId
	}
	return ""
}

func (x *ResourceCountDetails) GetEnclosingResource() string {
	if x != nil {
		return x.EnclosingResource
	}
	return ""
}

func (x *ResourceCountDetails) GetLimit() int32 {
	if x != nil {
		return x.Limit
	}
	return 0
}

func (x *ResourceCountDetails) GetLimitType() enums.ResourceLimitTypeEnum_ResourceLimitType {
	if x != nil {
		return x.LimitType
	}
	return enums.ResourceLimitTypeEnum_UNSPECIFIED
}

func (x *ResourceCountDetails) GetExistingCount() int32 {
	if x != nil {
		return x.ExistingCount
	}
	return 0
}
// A part of a field path.
type ErrorLocation_FieldPathElement struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// The name of a field or a oneof
	FieldName string `protobuf:"bytes,1,opt,name=field_name,json=fieldName,proto3" json:"field_name,omitempty"`
	// If field_name is a repeated field, this is the element that failed
	Index *int32 `protobuf:"varint,3,opt,name=index,proto3,oneof" json:"index,omitempty"`
}

// Reset restores x to its zero value; under the unsafe fast path it also
// re-stores the cached message info for this type.
func (x *ErrorLocation_FieldPathElement) Reset() {
	*x = ErrorLocation_FieldPathElement{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[9]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf runtime's text formatter.
func (x *ErrorLocation_FieldPathElement) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *ErrorLocation_FieldPathElement as a protobuf message.
func (*ErrorLocation_FieldPathElement) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use under the unsafe fast path.
func (x *ErrorLocation_FieldPathElement) ProtoReflect() protoreflect.Message {
	mi := &file_google_ads_googleads_v8_errors_errors_proto_msgTypes[9]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ErrorLocation_FieldPathElement.ProtoReflect.Descriptor instead.
func (*ErrorLocation_FieldPathElement) Descriptor() ([]byte, []int) {
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP(), []int{3, 0}
}

// GetFieldName returns the field name; nil-receiver safe.
func (x *ErrorLocation_FieldPathElement) GetFieldName() string {
	if x != nil {
		return x.FieldName
	}
	return ""
}

// GetIndex dereferences the optional index field, returning 0 when the
// receiver is nil or the field is unset (Index is a presence-tracking pointer).
func (x *ErrorLocation_FieldPathElement) GetIndex() int32 {
	if x != nil && x.Index != nil {
		return *x.Index
	}
	return 0
}
// File_google_ads_googleads_v8_errors_errors_proto is the protoreflect file
// descriptor for errors.proto. NOTE(review): it is declared here uninitialized;
// presumably populated by generated init code elsewhere in this file — confirm.
var File_google_ads_googleads_v8_errors_errors_proto protoreflect.FileDescriptor
var file_google_ads_googleads_v8_errors_errors_proto_rawDesc = []byte{
0x0a, 0x2b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x1a, 0x2b, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6f,
0x6c, 0x69, 0x63, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2a, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2f, 0x76, 0x38, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x76, 0x61, 0x6c, 0x75, 0x65,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61,
0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f,
0x65, 0x6e, 0x75, 0x6d, 0x73, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6c,
0x69, 0x6d, 0x69, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x3c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x69, 0x6e, 0x76, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f,
0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x42, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x63,
0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x62, 0x75, 0x64, 0x67, 0x65, 0x74, 0x5f, 0x70, 0x72, 0x6f,
0x70, 0x6f, 0x73, 0x61, 0x6c, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x1a, 0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2f, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x38, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x64, 0x5f, 0x63, 0x75,
0x73, 0x74, 0x6f, 0x6d, 0x69, 0x7a, 0x65, 0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2d, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73,
0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x1a, 0x36, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2f, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x61, 0x64, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x40, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x64, 0x5f, 0x67,
0x72, 0x6f, 0x75, 0x70, 0x5f, 0x62, 0x69, 0x64, 0x5f, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65,
0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3d, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x64,
0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x63, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x33, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x64, 0x5f,
0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x1a, 0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2f, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x66, 0x65, 0x65, 0x64, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x37, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x64, 0x5f, 0x70,
0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x35, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73,
0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x64, 0x5f, 0x73, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x64, 0x78, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x30, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x73, 0x73, 0x65,
0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x35, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x73,
0x73, 0x65, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x39, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73,
0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x65, 0x6e, 0x74, 0x69, 0x63, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x34, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f,
0x6a, 0x6f, 0x62, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x32, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x62, 0x69, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x1a, 0x3b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2f, 0x62, 0x69, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x74, 0x72, 0x61,
0x74, 0x65, 0x67, 0x79, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x62, 0x69, 0x6c, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x65, 0x74, 0x75, 0x70, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3a, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x61, 0x6d, 0x70, 0x61,
0x69, 0x67, 0x6e, 0x5f, 0x62, 0x75, 0x64, 0x67, 0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3d, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61,
0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f,
0x63, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x39, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x64,
0x72, 0x61, 0x66, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x33, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x65,
0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x66,
0x65, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x3e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x5f,
0x73, 0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76,
0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x5f,
0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x3a, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2f, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x69,
0x7a, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x32,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63,
0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x3c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x63,
0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x47, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x64, 0x6a, 0x75,
0x73, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x45, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72,
0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x76, 0x61, 0x72, 0x69,
0x61, 0x62, 0x6c, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x3c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x75, 0x70, 0x6c, 0x6f,
0x61, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x40,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63,
0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f,
0x72, 0x75, 0x6c, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x44, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75,
0x65, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61,
0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x72, 0x79, 0x5f, 0x63,
0x6f, 0x64, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x34, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x63, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x63,
0x6f, 0x64, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x3a, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x61, 0x75, 0x64, 0x69, 0x65, 0x6e, 0x63, 0x65, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3a, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x75, 0x73, 0x74,
0x6f, 0x6d, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38,
0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72,
0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x33, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76,
0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65,
0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x38, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x75,
0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x5f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x40, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38,
0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72,
0x5f, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3f, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d,
0x65, 0x72, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x33, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x62,
0x61, 0x73, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x64, 0x61, 0x74, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x35, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x33, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38,
0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x64, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x63, 0x74,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2f, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x65, 0x6e, 0x75,
0x6d, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x65, 0x78,
0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x69, 0x74, 0x65,
0x6d, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3c, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x65, 0x78,
0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x43, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x66, 0x65, 0x65, 0x64,
0x5f, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72,
0x65, 0x6e, 0x63, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x1a, 0x34, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38,
0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x69, 0x74, 0x65,
0x6d, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x1a, 0x3d, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x73, 0x65, 0x74, 0x5f,
0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x3b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x74, 0x61, 0x72, 0x67, 0x65,
0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3f, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x66, 0x65,
0x65, 0x64, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69,
0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x37,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x66,
0x65, 0x65, 0x64, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x30, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38,
0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x35, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f,
0x6d, 0x61, 0x73, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x33, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70,
0x61, 0x72, 0x73, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x49, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2f, 0x67, 0x65, 0x6f, 0x5f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x63, 0x6f,
0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x5f, 0x73, 0x75, 0x67, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f,
0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x31, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x68, 0x65,
0x61, 0x64, 0x65, 0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x2d, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x69, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x30, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x1a, 0x33, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x32, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61,
0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x69, 0x6e, 0x76, 0x6f, 0x69, 0x63, 0x65, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x40, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6b, 0x65, 0x79, 0x77, 0x6f,
0x72, 0x64, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x48, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6b, 0x65, 0x79,
0x77, 0x6f, 0x72, 0x64, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f,
0x75, 0x70, 0x5f, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x40, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61,
0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x70,
0x6c, 0x61, 0x6e, 0x5f, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x48, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76,
0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64,
0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x6b,
0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2f, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3c, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6b, 0x65, 0x79, 0x77,
0x6f, 0x72, 0x64, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x65, 0x61, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x30, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x38, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6c, 0x61, 0x6e, 0x67,
0x75, 0x61, 0x67, 0x65, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x39, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f,
0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x35, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2f, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f,
0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x35, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2f, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x69, 0x65, 0x72, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x31, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6d, 0x75, 0x74, 0x61, 0x74, 0x65,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x40, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6e, 0x65, 0x77,
0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x72, 0x65, 0x61, 0x74, 0x69,
0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3a,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6e,
0x6f, 0x74, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x34, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6e, 0x6f, 0x74, 0x5f, 0x65,
0x6d, 0x70, 0x74, 0x79, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x6e, 0x75, 0x6c, 0x6c, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x1a, 0x40, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2f, 0x6f, 0x66, 0x66, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64,
0x61, 0x74, 0x61, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x1a, 0x42, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x63,
0x63, 0x65, 0x73, 0x73, 0x5f, 0x64, 0x65, 0x6e, 0x69, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x33, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38,
0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3a, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x70, 0x61, 0x72,
0x74, 0x69, 0x61, 0x6c, 0x5f, 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76,
0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x70, 0x61, 0x79, 0x6d, 0x65, 0x6e, 0x74,
0x73, 0x5f, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x39, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x66, 0x69, 0x6e,
0x64, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x46, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69,
0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76,
0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f,
0x76, 0x69, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x30, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x30, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38,
0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x71, 0x75, 0x6f, 0x74, 0x61, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x30, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x35, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x72, 0x65, 0x61, 0x63,
0x68, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x39, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f,
0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x36, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x72, 0x65,
0x67, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x32, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x41, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x64, 0x65, 0x6e, 0x69, 0x65, 0x64,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x48, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x72, 0x65, 0x73,
0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x6c, 0x69, 0x6d, 0x69,
0x74, 0x5f, 0x65, 0x78, 0x63, 0x65, 0x65, 0x64, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x32, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61,
0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x3b, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x73, 0x68, 0x61, 0x72, 0x65,
0x64, 0x5f, 0x63, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x35, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38,
0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x5f, 0x73,
0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x35,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x73,
0x69, 0x7a, 0x65, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64,
0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x66, 0x6f, 0x72,
0x6d, 0x61, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a,
0x38, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f,
0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x49, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f,
0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x74, 0x68, 0x69, 0x72, 0x64, 0x5f,
0x70, 0x61, 0x72, 0x74, 0x79, 0x5f, 0x61, 0x70, 0x70, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x74,
0x69, 0x63, 0x73, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x34, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73,
0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x7a, 0x6f, 0x6e, 0x65, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x34, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x75, 0x72, 0x6c, 0x5f, 0x66,
0x69, 0x65, 0x6c, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x34, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x34, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61,
0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x73, 0x74,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x45, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2f, 0x79, 0x6f, 0x75,
0x74, 0x75, 0x62, 0x65, 0x5f, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x5f, 0x72, 0x65, 0x67, 0x69, 0x73,
0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f,
0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x22, 0x79, 0x0a, 0x10, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x46, 0x61,
0x69, 0x6c, 0x75, 0x72, 0x65, 0x12, 0x46, 0x0a, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x18,
0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x12, 0x1d, 0x0a,
0x0a, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28,
0x09, 0x52, 0x09, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x49, 0x64, 0x22, 0xc8, 0x02, 0x0a,
0x0e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12,
0x48, 0x0a, 0x0a, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x09,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73,
0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73,
0x61, 0x67, 0x65, 0x12, 0x3f, 0x0a, 0x07, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x18, 0x03,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x63,
0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x07, 0x74, 0x72, 0x69,
0x67, 0x67, 0x65, 0x72, 0x12, 0x49, 0x0a, 0x08, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38,
0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x4c, 0x6f, 0x63,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12,
0x46, 0x0a, 0x07, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x2c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x07,
0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x82, 0x77, 0x0a, 0x09, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x64, 0x0a, 0x0d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3d, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x52, 0x65,
0x71, 0x75, 0x65, 0x73, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x52,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x72,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x85, 0x01, 0x0a, 0x16,
0x62, 0x69, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4d, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x42, 0x69,
0x64, 0x64, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x42, 0x69, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x53, 0x74,
0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x14, 0x62,
0x69, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0x69, 0x0a, 0x0f, 0x75, 0x72, 0x6c, 0x5f, 0x66, 0x69, 0x65, 0x6c, 0x64,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x55, 0x72,
0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e,
0x55, 0x72, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x0d, 0x75, 0x72, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x7d,
0x0a, 0x14, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x49, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4c, 0x69,
0x73, 0x74, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69,
0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x12, 0x6c, 0x69, 0x73, 0x74, 0x4f,
0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x5c, 0x0a,
0x0b, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x05, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x39, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x0a, 0x71, 0x75, 0x65, 0x72, 0x79, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x60, 0x0a, 0x0c, 0x6d,
0x75, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x07, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x3b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00,
0x52, 0x0b, 0x6d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x6d, 0x0a,
0x10, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6d, 0x61, 0x73, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x41, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76,
0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x4d, 0x61,
0x73, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x46, 0x69, 0x65, 0x6c,
0x64, 0x4d, 0x61, 0x73, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0e, 0x66, 0x69,
0x65, 0x6c, 0x64, 0x4d, 0x61, 0x73, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x7c, 0x0a, 0x13,
0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x49, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x6f,
0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75,
0x6d, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x12, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x68, 0x0a, 0x0e, 0x69, 0x6e,
0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0a, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x0b, 0x71, 0x75, 0x6f, 0x74, 0x61, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x39, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x51, 0x75, 0x6f, 0x74, 0x61,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0a, 0x71, 0x75, 0x6f, 0x74, 0x61, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x12, 0x50, 0x0a, 0x08, 0x61, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0c,
0x20, 0x01, 0x28, 0x0e, 0x32, 0x33, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75,
0x6d, 0x2e, 0x41, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x07, 0x61, 0x64, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x12, 0x65, 0x0a, 0x0e, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3d, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x64,
0x47, 0x72, 0x6f, 0x75, 0x70, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x41,
0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x61,
0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x81, 0x01, 0x0a, 0x15,
0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x62, 0x75, 0x64, 0x67, 0x65, 0x74, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4b, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x61, 0x6d,
0x70, 0x61, 0x69, 0x67, 0x6e, 0x42, 0x75, 0x64, 0x67, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x42, 0x75, 0x64,
0x67, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x13, 0x63, 0x61, 0x6d, 0x70,
0x61, 0x69, 0x67, 0x6e, 0x42, 0x75, 0x64, 0x67, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12,
0x68, 0x0a, 0x0e, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76,
0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67,
0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x61, 0x6d, 0x70, 0x61,
0x69, 0x67, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x63, 0x61, 0x6d, 0x70,
0x61, 0x69, 0x67, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x80, 0x01, 0x0a, 0x14, 0x61, 0x75,
0x74, 0x68, 0x65, 0x6e, 0x74, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x65, 0x6e,
0x74, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75,
0x6d, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x65, 0x6e, 0x74, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x13, 0x61, 0x75, 0x74, 0x68, 0x65, 0x6e, 0x74,
0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x8a, 0x01, 0x0a,
0x18, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x63, 0x72, 0x69, 0x74, 0x65, 0x72,
0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0e, 0x32,
0x4f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x43, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f,
0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x41, 0x64, 0x47, 0x72, 0x6f,
0x75, 0x70, 0x43, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x48, 0x00, 0x52, 0x15, 0x61, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x43, 0x72, 0x69, 0x74, 0x65,
0x72, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x79, 0x0a, 0x13, 0x61, 0x64, 0x5f,
0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x69, 0x7a, 0x65, 0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x18, 0x13, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x47, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38,
0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x64, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d,
0x69, 0x7a, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x41, 0x64,
0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x69, 0x7a, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48,
0x00, 0x52, 0x11, 0x61, 0x64, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x69, 0x7a, 0x65, 0x72, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x12, 0x6e, 0x0a, 0x11, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70,
0x5f, 0x61, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0e, 0x32,
0x41, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x41, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45,
0x6e, 0x75, 0x6d, 0x2e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x41, 0x64, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x0e, 0x61, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x41, 0x64, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x12, 0x6d, 0x0a, 0x10, 0x61, 0x64, 0x5f, 0x73, 0x68, 0x61, 0x72, 0x69,
0x6e, 0x67, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x41,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e,
0x41, 0x64, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x41, 0x64, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x48, 0x00, 0x52, 0x0e, 0x61, 0x64, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0x54, 0x0a, 0x09, 0x61, 0x64, 0x78, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x18, 0x19, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x35, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38,
0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x64, 0x78, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x41, 0x64, 0x78, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x08, 0x61, 0x64, 0x78, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x0b, 0x61, 0x73, 0x73,
0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x6b, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x39,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e,
0x41, 0x73, 0x73, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x41,
0x73, 0x73, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x73, 0x73,
0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x64, 0x0a, 0x0d, 0x62, 0x69, 0x64, 0x64, 0x69,
0x6e, 0x67, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3d,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e,
0x42, 0x69, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d,
0x2e, 0x42, 0x69, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x0c, 0x62, 0x69, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x8d, 0x01,
0x0a, 0x18, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x63, 0x72, 0x69, 0x74, 0x65,
0x72, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x1d, 0x20, 0x01, 0x28, 0x0e,
0x32, 0x51, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x43, 0x72, 0x69, 0x74, 0x65, 0x72,
0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x61, 0x6d,
0x70, 0x61, 0x69, 0x67, 0x6e, 0x43, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x16, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x43,
0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x81, 0x01,
0x0a, 0x15, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x69, 0x7a,
0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x1f, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4b, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43,
0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x69, 0x7a, 0x65, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f,
0x6e, 0x53, 0x69, 0x7a, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x13, 0x63, 0x6f,
0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x69, 0x7a, 0x65, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x12, 0x75, 0x0a, 0x12, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x64,
0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x6d, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x45, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43,
0x6f, 0x75, 0x6e, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x64, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45,
0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x64, 0x65, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x10, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x72, 0x79, 0x43,
0x6f, 0x64, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x6c, 0x0a, 0x0f, 0x63, 0x72, 0x69, 0x74,
0x65, 0x72, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x20, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x41, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x43, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0e, 0x63, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f,
0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x68, 0x0a, 0x0e, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d,
0x65, 0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x5a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e,
0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75,
0x6d, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48,
0x00, 0x52, 0x0d, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x12, 0x58, 0x0a, 0x0a, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x21,
0x20, 0x01, 0x28, 0x0e, 0x32, 0x37, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x44, 0x61, 0x74, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45,
0x6e, 0x75, 0x6d, 0x2e, 0x44, 0x61, 0x74, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x09, 0x64, 0x61, 0x74, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x6d, 0x0a, 0x10, 0x64, 0x61,
0x74, 0x65, 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x22,
0x20, 0x01, 0x28, 0x0e, 0x32, 0x41, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x44, 0x61, 0x74, 0x65, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x44, 0x61, 0x74, 0x65, 0x52, 0x61, 0x6e,
0x67, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0e, 0x64, 0x61, 0x74, 0x65, 0x52,
0x61, 0x6e, 0x67, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x68, 0x0a, 0x0e, 0x64, 0x69, 0x73,
0x74, 0x69, 0x6e, 0x63, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x23, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x44, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x63, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x44, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x63, 0x74, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x64, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x63, 0x74, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0xa2, 0x01, 0x0a, 0x1e, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x74,
0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x24, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x5b, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x46, 0x65,
0x65, 0x64, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72,
0x65, 0x6e, 0x63, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x46, 0x65,
0x65, 0x64, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72,
0x65, 0x6e, 0x63, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x1b, 0x66, 0x65, 0x65,
0x64, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65,
0x6e, 0x63, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x68, 0x0a, 0x0e, 0x66, 0x75, 0x6e, 0x63,
0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x25, 0x20, 0x01, 0x28, 0x0e,
0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45,
0x6e, 0x75, 0x6d, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x48, 0x00, 0x52, 0x0d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x12, 0x85, 0x01, 0x0a, 0x16, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f,
0x70, 0x61, 0x72, 0x73, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x26, 0x20,
0x01, 0x28, 0x0e, 0x32, 0x4d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72,
0x73, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x46, 0x75,
0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x73, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x14, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61,
0x72, 0x73, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x50, 0x0a, 0x08, 0x69, 0x64,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x27, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x33, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x49, 0x64,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x49, 0x64, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x48, 0x00, 0x52, 0x07, 0x69, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x0b,
0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x28, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x39, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75,
0x6d, 0x2e, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0a,
0x69, 0x6d, 0x61, 0x67, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x79, 0x0a, 0x13, 0x6c, 0x61,
0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x18, 0x6e, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x47, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76,
0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67,
0x65, 0x43, 0x6f, 0x64, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4c,
0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x43, 0x6f, 0x64, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x48, 0x00, 0x52, 0x11, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x43, 0x6f, 0x64, 0x65,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x75, 0x0a, 0x12, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f, 0x62,
0x75, 0x6e, 0x64, 0x6c, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x2a, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x45, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x61, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x61, 0x42, 0x75, 0x6e,
0x64, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x10, 0x6d, 0x65, 0x64, 0x69,
0x61, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x75, 0x0a, 0x12,
0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x74, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x45, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x61, 0x55,
0x70, 0x6c, 0x6f, 0x61, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4d,
0x65, 0x64, 0x69, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48,
0x00, 0x52, 0x10, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0x6d, 0x0a, 0x10, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f, 0x66, 0x69, 0x6c,
0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x56, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x41, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4d,
0x65, 0x64, 0x69, 0x61, 0x46, 0x69, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75,
0x6d, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x61, 0x46, 0x69, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x48, 0x00, 0x52, 0x0e, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x46, 0x69, 0x6c, 0x65, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x12, 0x70, 0x0a, 0x10, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x69, 0x65, 0x72,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x2c, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x43, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4d, 0x75,
0x6c, 0x74, 0x69, 0x70, 0x6c, 0x69, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75,
0x6d, 0x2e, 0x4d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x69, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x48, 0x00, 0x52, 0x0f, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x69, 0x65, 0x72, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x12, 0x96, 0x01, 0x0a, 0x1b, 0x6e, 0x65, 0x77, 0x5f, 0x72, 0x65, 0x73,
0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x72, 0x65, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x18, 0x2d, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x55, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4e, 0x65, 0x77, 0x52,
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x72, 0x65, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4e, 0x65, 0x77, 0x52, 0x65, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x43, 0x72, 0x65, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x48, 0x00, 0x52, 0x18, 0x6e, 0x65, 0x77, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x43, 0x72, 0x65, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x69, 0x0a,
0x0f, 0x6e, 0x6f, 0x74, 0x5f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x18, 0x2e, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38,
0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4e, 0x6f, 0x74, 0x45, 0x6d, 0x70, 0x74, 0x79,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4e, 0x6f, 0x74, 0x45, 0x6d, 0x70,
0x74, 0x79, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x6e, 0x6f, 0x74, 0x45, 0x6d,
0x70, 0x74, 0x79, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x58, 0x0a, 0x0a, 0x6e, 0x75, 0x6c, 0x6c,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x2f, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x37, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4e, 0x75,
0x6c, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4e, 0x75, 0x6c, 0x6c,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x09, 0x6e, 0x75, 0x6c, 0x6c, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x12, 0x68, 0x0a, 0x0e, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x18, 0x30, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4f, 0x70, 0x65, 0x72,
0x61, 0x74, 0x6f, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4f, 0x70,
0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x6f,
0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x0b,
0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x31, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x39, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75,
0x6d, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0a,
0x72, 0x61, 0x6e, 0x67, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x80, 0x01, 0x0a, 0x14, 0x72,
0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x18, 0x3a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4b, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x6d,
0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f,
0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x13, 0x72, 0x65, 0x63, 0x6f, 0x6d, 0x6d,
0x65, 0x6e, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x71, 0x0a,
0x11, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x33, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x43, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x6f, 0x6e,
0x43, 0x6f, 0x64, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x52, 0x65,
0x67, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x64, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x0f, 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x64, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x12, 0x64, 0x0a, 0x0d, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x18, 0x34, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76,
0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e,
0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e,
0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x79, 0x0a, 0x13, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67,
0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x35, 0x20,
0x01, 0x28, 0x0e, 0x32, 0x47, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x61,
0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e,
0x67, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x11,
0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x12, 0x79, 0x0a, 0x13, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x65, 0x6e, 0x67,
0x74, 0x68, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x36, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x47,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e,
0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x6e, 0x67,
0x74, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x11, 0x73, 0x74, 0x72, 0x69, 0x6e,
0x67, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x9e, 0x01, 0x0a,
0x1d, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73,
0x73, 0x5f, 0x64, 0x65, 0x6e, 0x69, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x37,
0x20, 0x01, 0x28, 0x0e, 0x32, 0x59, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41,
0x63, 0x63, 0x65, 0x73, 0x73, 0x44, 0x65, 0x6e, 0x69, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x63,
0x63, 0x65, 0x73, 0x73, 0x44, 0x65, 0x6e, 0x69, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48,
0x00, 0x52, 0x1a, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x63, 0x63, 0x65,
0x73, 0x73, 0x44, 0x65, 0x6e, 0x69, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x9a, 0x01,
0x0a, 0x1c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73,
0x73, 0x5f, 0x64, 0x65, 0x6e, 0x69, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x38,
0x20, 0x01, 0x28, 0x0e, 0x32, 0x57, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x63,
0x63, 0x65, 0x73, 0x73, 0x44, 0x65, 0x6e, 0x69, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45,
0x6e, 0x75, 0x6d, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x63, 0x63, 0x65,
0x73, 0x73, 0x44, 0x65, 0x6e, 0x69, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x19, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x44,
0x65, 0x6e, 0x69, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0xb3, 0x01, 0x0a, 0x23, 0x72,
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x6c, 0x69,
0x6d, 0x69, 0x74, 0x5f, 0x65, 0x78, 0x63, 0x65, 0x65, 0x64, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x39, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x63, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x45, 0x78, 0x63, 0x65,
0x65, 0x64, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x52, 0x65,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x4c, 0x69, 0x6d, 0x69, 0x74,
0x45, 0x78, 0x63, 0x65, 0x65, 0x64, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x1f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x4c, 0x69,
0x6d, 0x69, 0x74, 0x45, 0x78, 0x63, 0x65, 0x65, 0x64, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x12, 0xaa, 0x01, 0x0a, 0x20, 0x79, 0x6f, 0x75, 0x74, 0x75, 0x62, 0x65, 0x5f, 0x76, 0x69, 0x64,
0x65, 0x6f, 0x5f, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x75, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x5f, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x59, 0x6f, 0x75,
0x74, 0x75, 0x62, 0x65, 0x56, 0x69, 0x64, 0x65, 0x6f, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x72,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x59,
0x6f, 0x75, 0x74, 0x75, 0x62, 0x65, 0x56, 0x69, 0x64, 0x65, 0x6f, 0x52, 0x65, 0x67, 0x69, 0x73,
0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x1d,
0x79, 0x6f, 0x75, 0x74, 0x75, 0x62, 0x65, 0x56, 0x69, 0x64, 0x65, 0x6f, 0x52, 0x65, 0x67, 0x69,
0x73, 0x74, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x93, 0x01,
0x0a, 0x1b, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x62, 0x69, 0x64, 0x5f, 0x6d,
0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x3b, 0x20,
0x01, 0x28, 0x0e, 0x32, 0x53, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x42, 0x69, 0x64, 0x4d,
0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d,
0x2e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x42, 0x69, 0x64, 0x4d, 0x6f, 0x64, 0x69, 0x66,
0x69, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x17, 0x61, 0x64, 0x47, 0x72,
0x6f, 0x75, 0x70, 0x42, 0x69, 0x64, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0x64, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x18, 0x3c, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3d, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x74,
0x65, 0x78, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x6f, 0x6e,
0x74, 0x65, 0x78, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x63, 0x6f, 0x6e,
0x74, 0x65, 0x78, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x0b, 0x66, 0x69, 0x65,
0x6c, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x3d, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x39,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e,
0x46, 0x69, 0x65, 0x6c, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x46,
0x69, 0x65, 0x6c, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x69, 0x65,
0x6c, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x6d, 0x0a, 0x10, 0x73, 0x68, 0x61, 0x72, 0x65,
0x64, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x3e, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x41, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x53, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x53, 0x65, 0x74, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0e, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x53, 0x65,
0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x85, 0x01, 0x0a, 0x16, 0x73, 0x68, 0x61, 0x72, 0x65,
0x64, 0x5f, 0x63, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x18, 0x3f, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76,
0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x43,
0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75,
0x6d, 0x2e, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x43, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f,
0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x14, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64,
0x43, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x8e,
0x01, 0x0a, 0x19, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x73, 0x68, 0x61, 0x72,
0x65, 0x64, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x40, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x51, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x53, 0x68, 0x61, 0x72,
0x65, 0x64, 0x53, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43,
0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x53, 0x65, 0x74,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x16, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67,
0x6e, 0x53, 0x68, 0x61, 0x72, 0x65, 0x64, 0x53, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12,
0x89, 0x01, 0x0a, 0x17, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x61,
0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x41, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x4f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x41, 0x63, 0x74,
0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x6f, 0x6e,
0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x15, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0xb2, 0x01, 0x0a, 0x22,
0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x64, 0x6a, 0x75, 0x73,
0x74, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x73, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x63, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72,
0x73, 0x69, 0x6f, 0x6e, 0x41, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x70,
0x6c, 0x6f, 0x61, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x6f,
0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x41, 0x64, 0x6a, 0x75, 0x73, 0x74, 0x6d, 0x65,
0x6e, 0x74, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x1f, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x41, 0x64, 0x6a, 0x75, 0x73,
0x74, 0x6d, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x12, 0xab, 0x01, 0x0a, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f,
0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x8f, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x5f, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x6f,
0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x56, 0x61,
0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e,
0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d,
0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x1d, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x43, 0x75, 0x73, 0x74, 0x6f,
0x6d, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x89,
0x01, 0x0a, 0x17, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x75, 0x70,
0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x6f, 0x20, 0x01, 0x28, 0x0e,
0x32, 0x4f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x55, 0x70, 0x6c, 0x6f,
0x61, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x6f, 0x6e, 0x76,
0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x48, 0x00, 0x52, 0x15, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x55,
0x70, 0x6c, 0x6f, 0x61, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x97, 0x01, 0x0a, 0x1b, 0x63,
0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f,
0x72, 0x75, 0x6c, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x91, 0x01, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x55, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c,
0x75, 0x65, 0x52, 0x75, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e,
0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52,
0x75, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x18, 0x63, 0x6f, 0x6e, 0x76,
0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x75, 0x6c, 0x65, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x12, 0xa4, 0x01, 0x0a, 0x1f, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73,
0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x5f, 0x73,
0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x92, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32,
0x5b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65,
0x52, 0x75, 0x6c, 0x65, 0x53, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d,
0x2e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65,
0x52, 0x75, 0x6c, 0x65, 0x53, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x1b,
0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52,
0x75, 0x6c, 0x65, 0x53, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x60, 0x0a, 0x0c, 0x68,
0x65, 0x61, 0x64, 0x65, 0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x42, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x3b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00,
0x52, 0x0b, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x68, 0x0a,
0x0e, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
0x43, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73,
0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61,
0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x7d, 0x0a, 0x14, 0x70, 0x6f, 0x6c, 0x69, 0x63,
0x79, 0x5f, 0x66, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
0x44, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x49, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x46, 0x69, 0x6e,
0x64, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x50, 0x6f,
0x6c, 0x69, 0x63, 0x79, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x48, 0x00, 0x52, 0x12, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e,
0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x58, 0x0a, 0x0a, 0x65, 0x6e, 0x75, 0x6d, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x18, 0x46, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x37, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x45, 0x6e, 0x75, 0x6d,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x09, 0x65, 0x6e, 0x75, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x12, 0x75, 0x0a, 0x12, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x70, 0x6c, 0x61, 0x6e,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x47, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x45, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4b, 0x65,
0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x10, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c,
0x61, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x96, 0x01, 0x0a, 0x1b, 0x6b, 0x65, 0x79, 0x77,
0x6f, 0x72, 0x64, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67,
0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x48, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x55, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4b,
0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69,
0x67, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4b, 0x65, 0x79, 0x77,
0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x18, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50,
0x6c, 0x61, 0x6e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x12, 0xb4, 0x01, 0x0a, 0x23, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x70, 0x6c, 0x61,
0x6e, 0x5f, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x6b, 0x65, 0x79, 0x77, 0x6f,
0x72, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x84, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32,
0x63, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2e, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x61, 0x6d, 0x70,
0x61, 0x69, 0x67, 0x6e, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e,
0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x1f, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50,
0x6c, 0x61, 0x6e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x4b, 0x65, 0x79, 0x77, 0x6f,
0x72, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x93, 0x01, 0x0a, 0x1b, 0x6b, 0x65, 0x79, 0x77,
0x6f, 0x72, 0x64, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75,
0x70, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x4a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x53, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4b,
0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75,
0x70, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4b, 0x65, 0x79, 0x77, 0x6f,
0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x17, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61,
0x6e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0xb1, 0x01,
0x0a, 0x23, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x61,
0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x85, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x61, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4b, 0x65,
0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70,
0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d,
0x2e, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x41, 0x64, 0x47, 0x72,
0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48,
0x00, 0x52, 0x1e, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x41, 0x64,
0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x12, 0x86, 0x01, 0x0a, 0x17, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x70, 0x6c,
0x61, 0x6e, 0x5f, 0x69, 0x64, 0x65, 0x61, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x4c, 0x20,
0x01, 0x28, 0x0e, 0x32, 0x4d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e,
0x49, 0x64, 0x65, 0x61, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4b, 0x65,
0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x65, 0x61, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x14, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x50, 0x6c, 0x61,
0x6e, 0x49, 0x64, 0x65, 0x61, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x9e, 0x01, 0x0a, 0x1d, 0x61,
0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x62, 0x75, 0x64, 0x67, 0x65, 0x74, 0x5f, 0x70, 0x72,
0x6f, 0x70, 0x6f, 0x73, 0x61, 0x6c, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x4d, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x59, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2e, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x75, 0x64, 0x67, 0x65,
0x74, 0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x61, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x75, 0x64, 0x67, 0x65, 0x74,
0x50, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x61, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x1a, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x75, 0x64, 0x67, 0x65, 0x74, 0x50, 0x72,
0x6f, 0x70, 0x6f, 0x73, 0x61, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x69, 0x0a, 0x0f, 0x75,
0x73, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x4e,
0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x75, 0x73, 0x65, 0x72, 0x4c, 0x69, 0x73,
0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x76, 0x0a, 0x12, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65,
0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x88, 0x01, 0x20,
0x01, 0x28, 0x0e, 0x32, 0x45, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65,
0x45, 0x76, 0x65, 0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x10, 0x63, 0x68,
0x61, 0x6e, 0x67, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x79,
0x0a, 0x13, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x4f, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x47, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x68, 0x61,
0x6e, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x11, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x53, 0x74,
0x61, 0x74, 0x75, 0x73, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x58, 0x0a, 0x0a, 0x66, 0x65, 0x65,
0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x50, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x37, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x46,
0x65, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x46, 0x65, 0x65,
0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x09, 0x66, 0x65, 0x65, 0x64, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0xb7, 0x01, 0x0a, 0x24, 0x67, 0x65, 0x6f, 0x5f, 0x74, 0x61, 0x72, 0x67,
0x65, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x5f, 0x73, 0x75, 0x67, 0x67,
0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x51, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2e, 0x47, 0x65, 0x6f, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x43, 0x6f, 0x6e,
0x73, 0x74, 0x61, 0x6e, 0x74, 0x53, 0x75, 0x67, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x47, 0x65, 0x6f, 0x54, 0x61, 0x72, 0x67,
0x65, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x53, 0x75, 0x67, 0x67, 0x65, 0x73,
0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x20, 0x67, 0x65, 0x6f,
0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x53, 0x75,
0x67, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x7d, 0x0a,
0x14, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x5f, 0x64, 0x72, 0x61, 0x66, 0x74, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x52, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x49, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x61, 0x6d,
0x70, 0x61, 0x69, 0x67, 0x6e, 0x44, 0x72, 0x61, 0x66, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45,
0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x44, 0x72, 0x61, 0x66,
0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x12, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69,
0x67, 0x6e, 0x44, 0x72, 0x61, 0x66, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x69, 0x0a, 0x0f,
0x66, 0x65, 0x65, 0x64, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
0x53, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65,
0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x66, 0x65, 0x65, 0x64, 0x49, 0x74,
0x65, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x0b, 0x6c, 0x61, 0x62, 0x65, 0x6c,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x54, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x39, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x4c, 0x61,
0x62, 0x65, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4c, 0x61, 0x62,
0x65, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0a, 0x6c, 0x61, 0x62, 0x65, 0x6c,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x79, 0x0a, 0x13, 0x62, 0x69, 0x6c, 0x6c, 0x69, 0x6e, 0x67,
0x5f, 0x73, 0x65, 0x74, 0x75, 0x70, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x57, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x47, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2e, 0x42, 0x69, 0x6c, 0x6c, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x74, 0x75, 0x70,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x42, 0x69, 0x6c, 0x6c, 0x69, 0x6e,
0x67, 0x53, 0x65, 0x74, 0x75, 0x70, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x11, 0x62,
0x69, 0x6c, 0x6c, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x74, 0x75, 0x70, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x12, 0x92, 0x01, 0x0a, 0x1a, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x5f, 0x63, 0x6c,
0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
0x58, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x53, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x43,
0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x43, 0x6c, 0x69, 0x65, 0x6e,
0x74, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x17, 0x63, 0x75,
0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x6e, 0x6b,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x96, 0x01, 0x0a, 0x1b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d,
0x65, 0x72, 0x5f, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x5b, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x55, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x75, 0x73,
0x74, 0x6f, 0x6d, 0x65, 0x72, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x4c, 0x69, 0x6e, 0x6b,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d,
0x65, 0x72, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x18, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x4d, 0x61,
0x6e, 0x61, 0x67, 0x65, 0x72, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x75,
0x0a, 0x12, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x18, 0x5c, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x45, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x46, 0x65, 0x65, 0x64,
0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d,
0x2e, 0x46, 0x65, 0x65, 0x64, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x48, 0x00, 0x52, 0x10, 0x66, 0x65, 0x65, 0x64, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x79, 0x0a, 0x13, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65,
0x72, 0x5f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x5d, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x47, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x46, 0x65, 0x65, 0x64,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d,
0x65, 0x72, 0x46, 0x65, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x11, 0x63,
0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x46, 0x65, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x12, 0x76, 0x0a, 0x13, 0x61, 0x64, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x66, 0x65, 0x65,
0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x5e, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x45, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41,
0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x46, 0x65, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45,
0x6e, 0x75, 0x6d, 0x2e, 0x41, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x46, 0x65, 0x65, 0x64, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x10, 0x61, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x46,
0x65, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x79, 0x0a, 0x13, 0x63, 0x61, 0x6d, 0x70,
0x61, 0x69, 0x67, 0x6e, 0x5f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
0x60, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x47, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x46,
0x65, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x61, 0x6d,
0x70, 0x61, 0x69, 0x67, 0x6e, 0x46, 0x65, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00,
0x52, 0x11, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x46, 0x65, 0x65, 0x64, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0x81, 0x01, 0x0a, 0x15, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x69,
0x6e, 0x74, 0x65, 0x72, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x61, 0x20,
0x01, 0x28, 0x0e, 0x32, 0x4b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x49, 0x6e, 0x74, 0x65, 0x72,
0x65, 0x73, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x75, 0x73,
0x74, 0x6f, 0x6d, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x65, 0x73, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x48, 0x00, 0x52, 0x13, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x65,
0x73, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x91, 0x01, 0x0a, 0x19, 0x63, 0x61, 0x6d, 0x70,
0x61, 0x69, 0x67, 0x6e, 0x5f, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x5f,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x62, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x53, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x61, 0x6d,
0x70, 0x61, 0x69, 0x67, 0x6e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67,
0x6e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x48, 0x00, 0x52, 0x17, 0x63, 0x61, 0x6d, 0x70, 0x61, 0x69, 0x67, 0x6e, 0x45, 0x78, 0x70, 0x65,
0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x8e, 0x01, 0x0a, 0x19,
0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x69,
0x74, 0x65, 0x6d, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x64, 0x20, 0x01, 0x28, 0x0e, 0x32,
0x51, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74,
0x65, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x45, 0x78, 0x74, 0x65,
0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x16, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x46,
0x65, 0x65, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x75, 0x0a, 0x12,
0x61, 0x64, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x65, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x45, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x64, 0x50, 0x61, 0x72, 0x61,
0x6d, 0x65, 0x74, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x41,
0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48,
0x00, 0x52, 0x10, 0x61, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0x92, 0x01, 0x0a, 0x1a, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x69, 0x74, 0x65,
0x6d, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x66, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x53, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74,
0x65, 0x6d, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x56, 0x61,
0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x17, 0x66, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x89, 0x01, 0x0a, 0x17, 0x65, 0x78, 0x74,
0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x18, 0x67, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4f, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x45, 0x78, 0x74, 0x65,
0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x53,
0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x15, 0x65,
0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x12, 0x77, 0x0a, 0x13, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x69, 0x74, 0x65,
0x6d, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x8c, 0x01, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x45, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x73, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x53, 0x65, 0x74, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65,
0x6d, 0x53, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x10, 0x66, 0x65, 0x65,
0x64, 0x49, 0x74, 0x65, 0x6d, 0x53, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x88, 0x01,
0x0a, 0x18, 0x66, 0x65, 0x65, 0x64, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x73, 0x65, 0x74, 0x5f,
0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x8d, 0x01, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x4d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x53, 0x65, 0x74, 0x4c, 0x69,
0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x46, 0x65, 0x65, 0x64,
0x49, 0x74, 0x65, 0x6d, 0x53, 0x65, 0x74, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x48, 0x00, 0x52, 0x14, 0x66, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x53, 0x65, 0x74, 0x4c,
0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x82, 0x01, 0x0a, 0x16, 0x66, 0x65, 0x65,
0x64, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x5f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x18, 0x68, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4b, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x49,
0x74, 0x65, 0x6d, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x46, 0x65, 0x65, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x54, 0x61, 0x72, 0x67, 0x65,
0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x13, 0x66, 0x65, 0x65, 0x64, 0x49, 0x74,
0x65, 0x6d, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x85, 0x01,
0x0a, 0x16, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x76, 0x69, 0x6f, 0x6c, 0x61, 0x74, 0x69,
0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x69, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4d,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e,
0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x56, 0x69, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x56,
0x69, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x14, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x56, 0x69, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x81, 0x01, 0x0a, 0x15, 0x70, 0x61, 0x72, 0x74, 0x69, 0x61,
0x6c, 0x5f, 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
0x70, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x46, 0x61,
0x69, 0x6c, 0x75, 0x72, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x50,
0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x46, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x13, 0x70, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x46, 0x61, 0x69,
0x6c, 0x75, 0x72, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0xae, 0x01, 0x0a, 0x21, 0x70, 0x6f,
0x6c, 0x69, 0x63, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f,
0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
0x72, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x61, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x56, 0x61, 0x6c,
0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79,
0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65,
0x74, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x1e, 0x70, 0x6f, 0x6c, 0x69,
0x63, 0x79, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61,
0x6d, 0x65, 0x74, 0x65, 0x72, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x6d, 0x0a, 0x10, 0x73, 0x69,
0x7a, 0x65, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x76,
0x20, 0x01, 0x28, 0x0e, 0x32, 0x41, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x53, 0x69, 0x7a, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x53, 0x69, 0x7a, 0x65, 0x4c, 0x69, 0x6d,
0x69, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0e, 0x73, 0x69, 0x7a, 0x65, 0x4c,
0x69, 0x6d, 0x69, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x93, 0x01, 0x0a, 0x1b, 0x6f, 0x66,
0x66, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f,
0x6a, 0x6f, 0x62, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x77, 0x20, 0x01, 0x28, 0x0e, 0x32,
0x53, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2e, 0x4f, 0x66, 0x66, 0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61,
0x4a, 0x6f, 0x62, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4f, 0x66, 0x66,
0x6c, 0x69, 0x6e, 0x65, 0x55, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 0x4a, 0x6f, 0x62, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x17, 0x6f, 0x66, 0x66, 0x6c, 0x69, 0x6e, 0x65, 0x55,
0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 0x4a, 0x6f, 0x62, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12,
0x82, 0x01, 0x0a, 0x15, 0x6e, 0x6f, 0x74, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x6c, 0x69, 0x73,
0x74, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x89, 0x01, 0x20, 0x01, 0x28, 0x0e,
0x32, 0x4b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2e, 0x4e, 0x6f, 0x74, 0x41, 0x6c, 0x6c, 0x6f, 0x77, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x64,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4e, 0x6f, 0x74, 0x41, 0x6c, 0x6c,
0x6f, 0x77, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
0x13, 0x6e, 0x6f, 0x74, 0x41, 0x6c, 0x6c, 0x6f, 0x77, 0x6c, 0x69, 0x73, 0x74, 0x65, 0x64, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x12, 0x75, 0x0a, 0x12, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x5f,
0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x79, 0x20, 0x01, 0x28, 0x0e,
0x32, 0x45, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2e, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x4c, 0x69,
0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x10, 0x6d, 0x61, 0x6e, 0x61, 0x67,
0x65, 0x72, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x79, 0x0a, 0x13, 0x63,
0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x7a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x47, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x75, 0x72, 0x72, 0x65, 0x6e,
0x63, 0x79, 0x43, 0x6f, 0x64, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e,
0x43, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x43, 0x6f, 0x64, 0x65, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x48, 0x00, 0x52, 0x11, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x43, 0x6f, 0x64,
0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x89, 0x01, 0x0a, 0x17, 0x61, 0x63, 0x63, 0x65, 0x73,
0x73, 0x5f, 0x69, 0x6e, 0x76, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x7c, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73,
0x49, 0x6e, 0x76, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45,
0x6e, 0x75, 0x6d, 0x2e, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x49, 0x6e, 0x76, 0x69, 0x74, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x15, 0x61, 0x63, 0x63,
0x65, 0x73, 0x73, 0x49, 0x6e, 0x76, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x12, 0x6d, 0x0a, 0x10, 0x72, 0x65, 0x61, 0x63, 0x68, 0x5f, 0x70, 0x6c, 0x61, 0x6e,
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x7d, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x41, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x52, 0x65,
0x61, 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d,
0x2e, 0x52, 0x65, 0x61, 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48,
0x00, 0x52, 0x0e, 0x72, 0x65, 0x61, 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x12, 0x64, 0x0a, 0x0d, 0x69, 0x6e, 0x76, 0x6f, 0x69, 0x63, 0x65, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x7e, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x49, 0x6e, 0x76, 0x6f, 0x69, 0x63,
0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x49, 0x6e, 0x76, 0x6f, 0x69,
0x63, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x69, 0x6e, 0x76, 0x6f, 0x69,
0x63, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x85, 0x01, 0x0a, 0x16, 0x70, 0x61, 0x79, 0x6d,
0x65, 0x6e, 0x74, 0x73, 0x5f, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x7f, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x50, 0x61, 0x79, 0x6d, 0x65, 0x6e,
0x74, 0x73, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e,
0x75, 0x6d, 0x2e, 0x50, 0x61, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x41, 0x63, 0x63, 0x6f, 0x75,
0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x14, 0x70, 0x61, 0x79, 0x6d, 0x65,
0x6e, 0x74, 0x73, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12,
0x6a, 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x7a, 0x6f, 0x6e, 0x65, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x80, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x5a,
0x6f, 0x6e, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x54, 0x69, 0x6d,
0x65, 0x5a, 0x6f, 0x6e, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x74, 0x69,
0x6d, 0x65, 0x5a, 0x6f, 0x6e, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x6e, 0x0a, 0x10, 0x61,
0x73, 0x73, 0x65, 0x74, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
0x81, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x41, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38,
0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x41, 0x73, 0x73, 0x65, 0x74, 0x4c, 0x69, 0x6e,
0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x41, 0x73, 0x73, 0x65, 0x74,
0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0e, 0x61, 0x73, 0x73,
0x65, 0x74, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x6a, 0x0a, 0x0f, 0x75,
0x73, 0x65, 0x72, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x82,
0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74,
0x61, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x75, 0x73, 0x65, 0x72, 0x44, 0x61,
0x74, 0x61, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x6a, 0x0a, 0x0f, 0x62, 0x61, 0x74, 0x63, 0x68,
0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x83, 0x01, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x4a, 0x6f, 0x62, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x4a, 0x6f, 0x62, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x48, 0x00, 0x52, 0x0d, 0x62, 0x61, 0x74, 0x63, 0x68, 0x4a, 0x6f, 0x62, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0x76, 0x0a, 0x12, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x6c,
0x69, 0x6e, 0x6b, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x86, 0x01, 0x20, 0x01, 0x28, 0x0e,
0x32, 0x45, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x2e, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x4c, 0x69,
0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x10, 0x61, 0x63, 0x63, 0x6f, 0x75,
0x6e, 0x74, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0xb5, 0x01, 0x0a, 0x24,
0x74, 0x68, 0x69, 0x72, 0x64, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x79, 0x5f, 0x61, 0x70, 0x70, 0x5f,
0x61, 0x6e, 0x61, 0x6c, 0x79, 0x74, 0x69, 0x63, 0x73, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x5f, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x18, 0x87, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x63, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x54, 0x68, 0x69,
0x72, 0x64, 0x50, 0x61, 0x72, 0x74, 0x79, 0x41, 0x70, 0x70, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x74,
0x69, 0x63, 0x73, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d,
0x2e, 0x54, 0x68, 0x69, 0x72, 0x64, 0x50, 0x61, 0x72, 0x74, 0x79, 0x41, 0x70, 0x70, 0x41, 0x6e,
0x61, 0x6c, 0x79, 0x74, 0x69, 0x63, 0x73, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x48, 0x00, 0x52, 0x1f, 0x74, 0x68, 0x69, 0x72, 0x64, 0x50, 0x61, 0x72, 0x74, 0x79, 0x41, 0x70,
0x70, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x74, 0x69, 0x63, 0x73, 0x4c, 0x69, 0x6e, 0x6b, 0x45, 0x72,
0x72, 0x6f, 0x72, 0x12, 0x93, 0x01, 0x0a, 0x1a, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72,
0x5f, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x65, 0x72, 0x72,
0x6f, 0x72, 0x18, 0x8a, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x53, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f,
0x6d, 0x65, 0x72, 0x55, 0x73, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x55,
0x73, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00,
0x52, 0x17, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x55, 0x73, 0x65, 0x72, 0x41, 0x63,
0x63, 0x65, 0x73, 0x73, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x82, 0x01, 0x0a, 0x15, 0x63, 0x75,
0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x61, 0x75, 0x64, 0x69, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x65, 0x72,
0x72, 0x6f, 0x72, 0x18, 0x8b, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x4b, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x43, 0x75, 0x73, 0x74,
0x6f, 0x6d, 0x41, 0x75, 0x64, 0x69, 0x65, 0x6e, 0x63, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x45,
0x6e, 0x75, 0x6d, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x41, 0x75, 0x64, 0x69, 0x65, 0x6e,
0x63, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x13, 0x63, 0x75, 0x73, 0x74, 0x6f,
0x6d, 0x41, 0x75, 0x64, 0x69, 0x65, 0x6e, 0x63, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0c,
0x0a, 0x0a, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x22, 0xd7, 0x01, 0x0a,
0x0d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x6e,
0x0a, 0x13, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x5f, 0x65, 0x6c, 0x65,
0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x45, 0x72, 0x72,
0x6f, 0x72, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64,
0x50, 0x61, 0x74, 0x68, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x11, 0x66, 0x69, 0x65,
0x6c, 0x64, 0x50, 0x61, 0x74, 0x68, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x56,
0x0a, 0x10, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x50, 0x61, 0x74, 0x68, 0x45, 0x6c, 0x65, 0x6d, 0x65,
0x6e, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d,
0x65, 0x12, 0x19, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05,
0x48, 0x00, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06,
0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x22, 0xf1, 0x03, 0x0a, 0x0c, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x34, 0x0a, 0x16, 0x75, 0x6e, 0x70, 0x75, 0x62,
0x6c, 0x69, 0x73, 0x68, 0x65, 0x64, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x63, 0x6f, 0x64,
0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x14, 0x75, 0x6e, 0x70, 0x75, 0x62, 0x6c, 0x69,
0x73, 0x68, 0x65, 0x64, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x70, 0x0a,
0x18, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x76, 0x69, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f,
0x6e, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x36, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x56, 0x69, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x16, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x56,
0x69, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12,
0x6a, 0x0a, 0x16, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x66, 0x69, 0x6e, 0x64, 0x69, 0x6e,
0x67, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x34, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x44, 0x65,
0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x14, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x46, 0x69, 0x6e,
0x64, 0x69, 0x6e, 0x67, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x61, 0x0a, 0x13, 0x71,
0x75, 0x6f, 0x74, 0x61, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69,
0x6c, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x11, 0x71, 0x75, 0x6f,
0x74, 0x61, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x6a,
0x0a, 0x16, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74,
0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x34,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e,
0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x44, 0x65, 0x74,
0x61, 0x69, 0x6c, 0x73, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f,
0x75, 0x6e, 0x74, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0xf5, 0x01, 0x0a, 0x16, 0x50,
0x6f, 0x6c, 0x69, 0x63, 0x79, 0x56, 0x69, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x65,
0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x3e, 0x0a, 0x1b, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61,
0x6c, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70,
0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x19, 0x65, 0x78, 0x74, 0x65,
0x72, 0x6e, 0x61, 0x6c, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69,
0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x44, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x32, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x63, 0x6f, 0x6d,
0x6d, 0x6f, 0x6e, 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x56, 0x69, 0x6f, 0x6c, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x4b, 0x65, 0x79, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x30, 0x0a, 0x14, 0x65,
0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x6e,
0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x65, 0x78, 0x74, 0x65, 0x72,
0x6e, 0x61, 0x6c, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x23, 0x0a,
0x0d, 0x69, 0x73, 0x5f, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x18, 0x06,
0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x69, 0x73, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x74, 0x69, 0x62,
0x6c, 0x65, 0x22, 0x7a, 0x0a, 0x14, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x46, 0x69, 0x6e, 0x64,
0x69, 0x6e, 0x67, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x62, 0x0a, 0x14, 0x70, 0x6f,
0x6c, 0x69, 0x63, 0x79, 0x5f, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x5f, 0x65, 0x6e, 0x74, 0x72, 0x69,
0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79,
0x54, 0x6f, 0x70, 0x69, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x12, 0x70, 0x6f, 0x6c, 0x69,
0x63, 0x79, 0x54, 0x6f, 0x70, 0x69, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x69, 0x65, 0x73, 0x22, 0x99,
0x02, 0x0a, 0x11, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74,
0x61, 0x69, 0x6c, 0x73, 0x12, 0x5f, 0x0a, 0x0a, 0x72, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x63, 0x6f,
0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x40, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x2e, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x2e, 0x51, 0x75, 0x6f, 0x74,
0x61, 0x52, 0x61, 0x74, 0x65, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x52, 0x09, 0x72, 0x61, 0x74, 0x65,
0x53, 0x63, 0x6f, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x61, 0x74, 0x65, 0x5f, 0x6e, 0x61,
0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x61, 0x74, 0x65, 0x4e, 0x61,
0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0b, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x64, 0x65, 0x6c, 0x61,
0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69,
0x6f, 0x6e, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x44, 0x65, 0x6c, 0x61, 0x79, 0x22, 0x4a,
0x0a, 0x0e, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x61, 0x74, 0x65, 0x53, 0x63, 0x6f, 0x70, 0x65,
0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10,
0x00, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x12, 0x0b,
0x0a, 0x07, 0x41, 0x43, 0x43, 0x4f, 0x55, 0x4e, 0x54, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x44,
0x45, 0x56, 0x45, 0x4c, 0x4f, 0x50, 0x45, 0x52, 0x10, 0x03, 0x22, 0x8c, 0x02, 0x0a, 0x14, 0x52,
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x44, 0x65, 0x74, 0x61,
0x69, 0x6c, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x65, 0x6e, 0x63, 0x6c, 0x6f, 0x73, 0x69, 0x6e, 0x67,
0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x65, 0x6e, 0x63, 0x6c, 0x6f,
0x73, 0x69, 0x6e, 0x67, 0x49, 0x64, 0x12, 0x2d, 0x0a, 0x12, 0x65, 0x6e, 0x63, 0x6c, 0x6f, 0x73,
0x69, 0x6e, 0x67, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01,
0x28, 0x09, 0x52, 0x11, 0x65, 0x6e, 0x63, 0x6c, 0x6f, 0x73, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73,
0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02,
0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x65, 0x0a, 0x0a, 0x6c,
0x69, 0x6d, 0x69, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32,
0x46, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x2e,
0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x54, 0x79, 0x70,
0x65, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, 0x69,
0x6d, 0x69, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x54, 0x79,
0x70, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x65, 0x78, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x63,
0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x65, 0x78, 0x69, 0x73,
0x74, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0xe6, 0x01, 0x0a, 0x22, 0x63, 0x6f,
0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73,
0x42, 0x0b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a,
0x44, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f,
0x72, 0x67, 0x2f, 0x67, 0x65, 0x6e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x3b, 0x65,
0x72, 0x72, 0x6f, 0x72, 0x73, 0xa2, 0x02, 0x03, 0x47, 0x41, 0x41, 0xaa, 0x02, 0x1e, 0x47, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x41, 0x64, 0x73, 0x2e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41,
0x64, 0x73, 0x2e, 0x56, 0x38, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0xca, 0x02, 0x1e, 0x47,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x5c, 0x41, 0x64, 0x73, 0x5c, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x41, 0x64, 0x73, 0x5c, 0x56, 0x38, 0x5c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0xea, 0x02, 0x22,
0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x3a, 0x3a, 0x41, 0x64, 0x73, 0x3a, 0x3a, 0x47, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x3a, 0x3a, 0x56, 0x38, 0x3a, 0x3a, 0x45, 0x72, 0x72, 0x6f,
0x72, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
// Lazy-compression state for the raw file descriptor:
// rawDescOnce guards the one-time gzip compression performed in
// file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP, and
// rawDescData starts out aliasing the uncompressed rawDesc bytes and is
// overwritten in place with the compressed form on first use.
var (
	file_google_ads_googleads_v8_errors_errors_proto_rawDescOnce sync.Once
	file_google_ads_googleads_v8_errors_errors_proto_rawDescData = file_google_ads_googleads_v8_errors_errors_proto_rawDesc
)
// file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP returns the
// gzip-compressed form of this file's raw proto descriptor. The compression
// runs exactly once (guarded by rawDescOnce); the result replaces the cached
// rawDescData so every subsequent call returns the already-compressed bytes.
// NOTE(review): this file appears to be protoc-generated — prefer
// regenerating over hand-editing.
func file_google_ads_googleads_v8_errors_errors_proto_rawDescGZIP() []byte {
	file_google_ads_googleads_v8_errors_errors_proto_rawDescOnce.Do(func() {
		file_google_ads_googleads_v8_errors_errors_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_ads_googleads_v8_errors_errors_proto_rawDescData)
	})
	return file_google_ads_googleads_v8_errors_errors_proto_rawDescData
}
// Runtime type-info tables for this proto file: one EnumInfo slot for the
// single top-level enum and ten MessageInfo slots for the ten message types
// (counts match the goTypes table below; populated by the protoimpl runtime).
var file_google_ads_googleads_v8_errors_errors_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_google_ads_googleads_v8_errors_errors_proto_msgTypes = make([]protoimpl.MessageInfo, 10)
// goTypes maps the descriptor's numeric type indices to concrete Go types:
// index 0 is this file's enum, indices 1-10 are its message types, and the
// remaining entries are cross-file dependencies (error-code enums, common
// types, and well-known types). The trailing `// N:` comments give each
// entry's index, which the depIdxs table below refers back to.
var file_google_ads_googleads_v8_errors_errors_proto_goTypes = []interface{}{
	(QuotaErrorDetails_QuotaRateScope)(0), // 0: google.ads.googleads.v8.errors.QuotaErrorDetails.QuotaRateScope
	(*GoogleAdsFailure)(nil),              // 1: google.ads.googleads.v8.errors.GoogleAdsFailure
	(*GoogleAdsError)(nil),                // 2: google.ads.googleads.v8.errors.GoogleAdsError
	(*ErrorCode)(nil),                     // 3: google.ads.googleads.v8.errors.ErrorCode
	(*ErrorLocation)(nil),                 // 4: google.ads.googleads.v8.errors.ErrorLocation
	(*ErrorDetails)(nil),                  // 5: google.ads.googleads.v8.errors.ErrorDetails
	(*PolicyViolationDetails)(nil),        // 6: google.ads.googleads.v8.errors.PolicyViolationDetails
	(*PolicyFindingDetails)(nil),          // 7: google.ads.googleads.v8.errors.PolicyFindingDetails
	(*QuotaErrorDetails)(nil),             // 8: google.ads.googleads.v8.errors.QuotaErrorDetails
	(*ResourceCountDetails)(nil),          // 9: google.ads.googleads.v8.errors.ResourceCountDetails
	(*ErrorLocation_FieldPathElement)(nil), // 10: google.ads.googleads.v8.errors.ErrorLocation.FieldPathElement
	(*common.Value)(nil),                   // 11: google.ads.googleads.v8.common.Value
	(RequestErrorEnum_RequestError)(0),     // 12: google.ads.googleads.v8.errors.RequestErrorEnum.RequestError
	(BiddingStrategyErrorEnum_BiddingStrategyError)(0),   // 13: google.ads.googleads.v8.errors.BiddingStrategyErrorEnum.BiddingStrategyError
	(UrlFieldErrorEnum_UrlFieldError)(0),                 // 14: google.ads.googleads.v8.errors.UrlFieldErrorEnum.UrlFieldError
	(ListOperationErrorEnum_ListOperationError)(0),       // 15: google.ads.googleads.v8.errors.ListOperationErrorEnum.ListOperationError
	(QueryErrorEnum_QueryError)(0),                       // 16: google.ads.googleads.v8.errors.QueryErrorEnum.QueryError
	(MutateErrorEnum_MutateError)(0),                     // 17: google.ads.googleads.v8.errors.MutateErrorEnum.MutateError
	(FieldMaskErrorEnum_FieldMaskError)(0),               // 18: google.ads.googleads.v8.errors.FieldMaskErrorEnum.FieldMaskError
	(AuthorizationErrorEnum_AuthorizationError)(0),       // 19: google.ads.googleads.v8.errors.AuthorizationErrorEnum.AuthorizationError
	(InternalErrorEnum_InternalError)(0),                 // 20: google.ads.googleads.v8.errors.InternalErrorEnum.InternalError
	(QuotaErrorEnum_QuotaError)(0),                       // 21: google.ads.googleads.v8.errors.QuotaErrorEnum.QuotaError
	(AdErrorEnum_AdError)(0),                             // 22: google.ads.googleads.v8.errors.AdErrorEnum.AdError
	(AdGroupErrorEnum_AdGroupError)(0),                   // 23: google.ads.googleads.v8.errors.AdGroupErrorEnum.AdGroupError
	(CampaignBudgetErrorEnum_CampaignBudgetError)(0),     // 24: google.ads.googleads.v8.errors.CampaignBudgetErrorEnum.CampaignBudgetError
	(CampaignErrorEnum_CampaignError)(0),                 // 25: google.ads.googleads.v8.errors.CampaignErrorEnum.CampaignError
	(AuthenticationErrorEnum_AuthenticationError)(0),     // 26: google.ads.googleads.v8.errors.AuthenticationErrorEnum.AuthenticationError
	(AdGroupCriterionErrorEnum_AdGroupCriterionError)(0), // 27: google.ads.googleads.v8.errors.AdGroupCriterionErrorEnum.AdGroupCriterionError
	(AdCustomizerErrorEnum_AdCustomizerError)(0),         // 28: google.ads.googleads.v8.errors.AdCustomizerErrorEnum.AdCustomizerError
	(AdGroupAdErrorEnum_AdGroupAdError)(0),               // 29: google.ads.googleads.v8.errors.AdGroupAdErrorEnum.AdGroupAdError
	(AdSharingErrorEnum_AdSharingError)(0),               // 30: google.ads.googleads.v8.errors.AdSharingErrorEnum.AdSharingError
	(AdxErrorEnum_AdxError)(0),                           // 31: google.ads.googleads.v8.errors.AdxErrorEnum.AdxError
	(AssetErrorEnum_AssetError)(0),                       // 32: google.ads.googleads.v8.errors.AssetErrorEnum.AssetError
	(BiddingErrorEnum_BiddingError)(0),                   // 33: google.ads.googleads.v8.errors.BiddingErrorEnum.BiddingError
	(CampaignCriterionErrorEnum_CampaignCriterionError)(0),           // 34: google.ads.googleads.v8.errors.CampaignCriterionErrorEnum.CampaignCriterionError
	(CollectionSizeErrorEnum_CollectionSizeError)(0),                 // 35: google.ads.googleads.v8.errors.CollectionSizeErrorEnum.CollectionSizeError
	(CountryCodeErrorEnum_CountryCodeError)(0),                       // 36: google.ads.googleads.v8.errors.CountryCodeErrorEnum.CountryCodeError
	(CriterionErrorEnum_CriterionError)(0),                           // 37: google.ads.googleads.v8.errors.CriterionErrorEnum.CriterionError
	(CustomerErrorEnum_CustomerError)(0),                             // 38: google.ads.googleads.v8.errors.CustomerErrorEnum.CustomerError
	(DateErrorEnum_DateError)(0),                                     // 39: google.ads.googleads.v8.errors.DateErrorEnum.DateError
	(DateRangeErrorEnum_DateRangeError)(0),                           // 40: google.ads.googleads.v8.errors.DateRangeErrorEnum.DateRangeError
	(DistinctErrorEnum_DistinctError)(0),                             // 41: google.ads.googleads.v8.errors.DistinctErrorEnum.DistinctError
	(FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError)(0), // 42: google.ads.googleads.v8.errors.FeedAttributeReferenceErrorEnum.FeedAttributeReferenceError
	(FunctionErrorEnum_FunctionError)(0),                             // 43: google.ads.googleads.v8.errors.FunctionErrorEnum.FunctionError
	(FunctionParsingErrorEnum_FunctionParsingError)(0),               // 44: google.ads.googleads.v8.errors.FunctionParsingErrorEnum.FunctionParsingError
	(IdErrorEnum_IdError)(0),                                         // 45: google.ads.googleads.v8.errors.IdErrorEnum.IdError
	(ImageErrorEnum_ImageError)(0),                                   // 46: google.ads.googleads.v8.errors.ImageErrorEnum.ImageError
	(LanguageCodeErrorEnum_LanguageCodeError)(0),                     // 47: google.ads.googleads.v8.errors.LanguageCodeErrorEnum.LanguageCodeError
	(MediaBundleErrorEnum_MediaBundleError)(0),                       // 48: google.ads.googleads.v8.errors.MediaBundleErrorEnum.MediaBundleError
	(MediaUploadErrorEnum_MediaUploadError)(0),                       // 49: google.ads.googleads.v8.errors.MediaUploadErrorEnum.MediaUploadError
	(MediaFileErrorEnum_MediaFileError)(0),                           // 50: google.ads.googleads.v8.errors.MediaFileErrorEnum.MediaFileError
	(MultiplierErrorEnum_MultiplierError)(0),                         // 51: google.ads.googleads.v8.errors.MultiplierErrorEnum.MultiplierError
	(NewResourceCreationErrorEnum_NewResourceCreationError)(0),       // 52: google.ads.googleads.v8.errors.NewResourceCreationErrorEnum.NewResourceCreationError
	(NotEmptyErrorEnum_NotEmptyError)(0),                             // 53: google.ads.googleads.v8.errors.NotEmptyErrorEnum.NotEmptyError
	(NullErrorEnum_NullError)(0),                                     // 54: google.ads.googleads.v8.errors.NullErrorEnum.NullError
	(OperatorErrorEnum_OperatorError)(0),                             // 55: google.ads.googleads.v8.errors.OperatorErrorEnum.OperatorError
	(RangeErrorEnum_RangeError)(0),                                   // 56: google.ads.googleads.v8.errors.RangeErrorEnum.RangeError
	(RecommendationErrorEnum_RecommendationError)(0),                 // 57: google.ads.googleads.v8.errors.RecommendationErrorEnum.RecommendationError
	(RegionCodeErrorEnum_RegionCodeError)(0),                         // 58: google.ads.googleads.v8.errors.RegionCodeErrorEnum.RegionCodeError
	(SettingErrorEnum_SettingError)(0),                               // 59: google.ads.googleads.v8.errors.SettingErrorEnum.SettingError
	(StringFormatErrorEnum_StringFormatError)(0),                     // 60: google.ads.googleads.v8.errors.StringFormatErrorEnum.StringFormatError
	(StringLengthErrorEnum_StringLengthError)(0),                     // 61: google.ads.googleads.v8.errors.StringLengthErrorEnum.StringLengthError
	(OperationAccessDeniedErrorEnum_OperationAccessDeniedError)(0),   // 62: google.ads.googleads.v8.errors.OperationAccessDeniedErrorEnum.OperationAccessDeniedError
	(ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError)(0),     // 63: google.ads.googleads.v8.errors.ResourceAccessDeniedErrorEnum.ResourceAccessDeniedError
	(ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError)(0), // 64: google.ads.googleads.v8.errors.ResourceCountLimitExceededErrorEnum.ResourceCountLimitExceededError
	(YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError)(0),     // 65: google.ads.googleads.v8.errors.YoutubeVideoRegistrationErrorEnum.YoutubeVideoRegistrationError
	(AdGroupBidModifierErrorEnum_AdGroupBidModifierError)(0),                 // 66: google.ads.googleads.v8.errors.AdGroupBidModifierErrorEnum.AdGroupBidModifierError
	(ContextErrorEnum_ContextError)(0),                                       // 67: google.ads.googleads.v8.errors.ContextErrorEnum.ContextError
	(FieldErrorEnum_FieldError)(0),                                           // 68: google.ads.googleads.v8.errors.FieldErrorEnum.FieldError
	(SharedSetErrorEnum_SharedSetError)(0),                                   // 69: google.ads.googleads.v8.errors.SharedSetErrorEnum.SharedSetError
	(SharedCriterionErrorEnum_SharedCriterionError)(0),                       // 70: google.ads.googleads.v8.errors.SharedCriterionErrorEnum.SharedCriterionError
	(CampaignSharedSetErrorEnum_CampaignSharedSetError)(0),                   // 71: google.ads.googleads.v8.errors.CampaignSharedSetErrorEnum.CampaignSharedSetError
	(ConversionActionErrorEnum_ConversionActionError)(0),                     // 72: google.ads.googleads.v8.errors.ConversionActionErrorEnum.ConversionActionError
	(ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError)(0), // 73: google.ads.googleads.v8.errors.ConversionAdjustmentUploadErrorEnum.ConversionAdjustmentUploadError
	(ConversionCustomVariableErrorEnum_ConversionCustomVariableError)(0),     // 74: google.ads.googleads.v8.errors.ConversionCustomVariableErrorEnum.ConversionCustomVariableError
	(ConversionUploadErrorEnum_ConversionUploadError)(0),                     // 75: google.ads.googleads.v8.errors.ConversionUploadErrorEnum.ConversionUploadError
	(ConversionValueRuleErrorEnum_ConversionValueRuleError)(0),               // 76: google.ads.googleads.v8.errors.ConversionValueRuleErrorEnum.ConversionValueRuleError
	(ConversionValueRuleSetErrorEnum_ConversionValueRuleSetError)(0),         // 77: google.ads.googleads.v8.errors.ConversionValueRuleSetErrorEnum.ConversionValueRuleSetError
	(HeaderErrorEnum_HeaderError)(0),                                         // 78: google.ads.googleads.v8.errors.HeaderErrorEnum.HeaderError
	(DatabaseErrorEnum_DatabaseError)(0),                                     // 79: google.ads.googleads.v8.errors.DatabaseErrorEnum.DatabaseError
	(PolicyFindingErrorEnum_PolicyFindingError)(0),                           // 80: google.ads.googleads.v8.errors.PolicyFindingErrorEnum.PolicyFindingError
	(EnumErrorEnum_EnumError)(0),                                             // 81: google.ads.googleads.v8.errors.EnumErrorEnum.EnumError
	(KeywordPlanErrorEnum_KeywordPlanError)(0),                               // 82: google.ads.googleads.v8.errors.KeywordPlanErrorEnum.KeywordPlanError
	(KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError)(0),               // 83: google.ads.googleads.v8.errors.KeywordPlanCampaignErrorEnum.KeywordPlanCampaignError
	(KeywordPlanCampaignKeywordErrorEnum_KeywordPlanCampaignKeywordError)(0), // 84: google.ads.googleads.v8.errors.KeywordPlanCampaignKeywordErrorEnum.KeywordPlanCampaignKeywordError
	(KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError)(0),                 // 85: google.ads.googleads.v8.errors.KeywordPlanAdGroupErrorEnum.KeywordPlanAdGroupError
	(KeywordPlanAdGroupKeywordErrorEnum_KeywordPlanAdGroupKeywordError)(0),   // 86: google.ads.googleads.v8.errors.KeywordPlanAdGroupKeywordErrorEnum.KeywordPlanAdGroupKeywordError
	(KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError)(0),                       // 87: google.ads.googleads.v8.errors.KeywordPlanIdeaErrorEnum.KeywordPlanIdeaError
	(AccountBudgetProposalErrorEnum_AccountBudgetProposalError)(0),           // 88: google.ads.googleads.v8.errors.AccountBudgetProposalErrorEnum.AccountBudgetProposalError
	(UserListErrorEnum_UserListError)(0),                                     // 89: google.ads.googleads.v8.errors.UserListErrorEnum.UserListError
	(ChangeEventErrorEnum_ChangeEventError)(0),                               // 90: google.ads.googleads.v8.errors.ChangeEventErrorEnum.ChangeEventError
	(ChangeStatusErrorEnum_ChangeStatusError)(0),                             // 91: google.ads.googleads.v8.errors.ChangeStatusErrorEnum.ChangeStatusError
	(FeedErrorEnum_FeedError)(0),                                             // 92: google.ads.googleads.v8.errors.FeedErrorEnum.FeedError
	(GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError)(0), // 93: google.ads.googleads.v8.errors.GeoTargetConstantSuggestionErrorEnum.GeoTargetConstantSuggestionError
	(CampaignDraftErrorEnum_CampaignDraftError)(0),                             // 94: google.ads.googleads.v8.errors.CampaignDraftErrorEnum.CampaignDraftError
	(FeedItemErrorEnum_FeedItemError)(0),                                       // 95: google.ads.googleads.v8.errors.FeedItemErrorEnum.FeedItemError
	(LabelErrorEnum_LabelError)(0),                                             // 96: google.ads.googleads.v8.errors.LabelErrorEnum.LabelError
	(BillingSetupErrorEnum_BillingSetupError)(0),                               // 97: google.ads.googleads.v8.errors.BillingSetupErrorEnum.BillingSetupError
	(CustomerClientLinkErrorEnum_CustomerClientLinkError)(0),                   // 98: google.ads.googleads.v8.errors.CustomerClientLinkErrorEnum.CustomerClientLinkError
	(CustomerManagerLinkErrorEnum_CustomerManagerLinkError)(0),                 // 99: google.ads.googleads.v8.errors.CustomerManagerLinkErrorEnum.CustomerManagerLinkError
	(FeedMappingErrorEnum_FeedMappingError)(0),                                 // 100: google.ads.googleads.v8.errors.FeedMappingErrorEnum.FeedMappingError
	(CustomerFeedErrorEnum_CustomerFeedError)(0),                               // 101: google.ads.googleads.v8.errors.CustomerFeedErrorEnum.CustomerFeedError
	(AdGroupFeedErrorEnum_AdGroupFeedError)(0),                                 // 102: google.ads.googleads.v8.errors.AdGroupFeedErrorEnum.AdGroupFeedError
	(CampaignFeedErrorEnum_CampaignFeedError)(0),                               // 103: google.ads.googleads.v8.errors.CampaignFeedErrorEnum.CampaignFeedError
	(CustomInterestErrorEnum_CustomInterestError)(0),                           // 104: google.ads.googleads.v8.errors.CustomInterestErrorEnum.CustomInterestError
	(CampaignExperimentErrorEnum_CampaignExperimentError)(0),                   // 105: google.ads.googleads.v8.errors.CampaignExperimentErrorEnum.CampaignExperimentError
	(ExtensionFeedItemErrorEnum_ExtensionFeedItemError)(0),                     // 106: google.ads.googleads.v8.errors.ExtensionFeedItemErrorEnum.ExtensionFeedItemError
	(AdParameterErrorEnum_AdParameterError)(0),                                 // 107: google.ads.googleads.v8.errors.AdParameterErrorEnum.AdParameterError
	(FeedItemValidationErrorEnum_FeedItemValidationError)(0),                   // 108: google.ads.googleads.v8.errors.FeedItemValidationErrorEnum.FeedItemValidationError
	(ExtensionSettingErrorEnum_ExtensionSettingError)(0),                       // 109: google.ads.googleads.v8.errors.ExtensionSettingErrorEnum.ExtensionSettingError
	(FeedItemSetErrorEnum_FeedItemSetError)(0),                                 // 110: google.ads.googleads.v8.errors.FeedItemSetErrorEnum.FeedItemSetError
	(FeedItemSetLinkErrorEnum_FeedItemSetLinkError)(0),                         // 111: google.ads.googleads.v8.errors.FeedItemSetLinkErrorEnum.FeedItemSetLinkError
	(FeedItemTargetErrorEnum_FeedItemTargetError)(0),                           // 112: google.ads.googleads.v8.errors.FeedItemTargetErrorEnum.FeedItemTargetError
	(PolicyViolationErrorEnum_PolicyViolationError)(0),                         // 113: google.ads.googleads.v8.errors.PolicyViolationErrorEnum.PolicyViolationError
	(PartialFailureErrorEnum_PartialFailureError)(0),                           // 114: google.ads.googleads.v8.errors.PartialFailureErrorEnum.PartialFailureError
	(PolicyValidationParameterErrorEnum_PolicyValidationParameterError)(0),     // 115: google.ads.googleads.v8.errors.PolicyValidationParameterErrorEnum.PolicyValidationParameterError
	(SizeLimitErrorEnum_SizeLimitError)(0),                                     // 116: google.ads.googleads.v8.errors.SizeLimitErrorEnum.SizeLimitError
	(OfflineUserDataJobErrorEnum_OfflineUserDataJobError)(0),                   // 117: google.ads.googleads.v8.errors.OfflineUserDataJobErrorEnum.OfflineUserDataJobError
	(NotAllowlistedErrorEnum_NotAllowlistedError)(0),                           // 118: google.ads.googleads.v8.errors.NotAllowlistedErrorEnum.NotAllowlistedError
	(ManagerLinkErrorEnum_ManagerLinkError)(0),                                 // 119: google.ads.googleads.v8.errors.ManagerLinkErrorEnum.ManagerLinkError
	(CurrencyCodeErrorEnum_CurrencyCodeError)(0),                               // 120: google.ads.googleads.v8.errors.CurrencyCodeErrorEnum.CurrencyCodeError
	(AccessInvitationErrorEnum_AccessInvitationError)(0),                       // 121: google.ads.googleads.v8.errors.AccessInvitationErrorEnum.AccessInvitationError
	(ReachPlanErrorEnum_ReachPlanError)(0),                                     // 122: google.ads.googleads.v8.errors.ReachPlanErrorEnum.ReachPlanError
	(InvoiceErrorEnum_InvoiceError)(0),                                         // 123: google.ads.googleads.v8.errors.InvoiceErrorEnum.InvoiceError
	(PaymentsAccountErrorEnum_PaymentsAccountError)(0),                         // 124: google.ads.googleads.v8.errors.PaymentsAccountErrorEnum.PaymentsAccountError
	(TimeZoneErrorEnum_TimeZoneError)(0),                                       // 125: google.ads.googleads.v8.errors.TimeZoneErrorEnum.TimeZoneError
	(AssetLinkErrorEnum_AssetLinkError)(0),                                     // 126: google.ads.googleads.v8.errors.AssetLinkErrorEnum.AssetLinkError
	(UserDataErrorEnum_UserDataError)(0),                                       // 127: google.ads.googleads.v8.errors.UserDataErrorEnum.UserDataError
	(BatchJobErrorEnum_BatchJobError)(0),                                       // 128: google.ads.googleads.v8.errors.BatchJobErrorEnum.BatchJobError
	(AccountLinkErrorEnum_AccountLinkError)(0),                                 // 129: google.ads.googleads.v8.errors.AccountLinkErrorEnum.AccountLinkError
	(ThirdPartyAppAnalyticsLinkErrorEnum_ThirdPartyAppAnalyticsLinkError)(0),   // 130: google.ads.googleads.v8.errors.ThirdPartyAppAnalyticsLinkErrorEnum.ThirdPartyAppAnalyticsLinkError
	(CustomerUserAccessErrorEnum_CustomerUserAccessError)(0),                   // 131: google.ads.googleads.v8.errors.CustomerUserAccessErrorEnum.CustomerUserAccessError
	(CustomAudienceErrorEnum_CustomAudienceError)(0),                           // 132: google.ads.googleads.v8.errors.CustomAudienceErrorEnum.CustomAudienceError
	(*common.PolicyViolationKey)(nil),                                          // 133: google.ads.googleads.v8.common.PolicyViolationKey
	(*common.PolicyTopicEntry)(nil),                                            // 134: google.ads.googleads.v8.common.PolicyTopicEntry
	(*durationpb.Duration)(nil),                                                // 135: google.protobuf.Duration
	(enums.ResourceLimitTypeEnum_ResourceLimitType)(0),                         // 136: google.ads.googleads.v8.enums.ResourceLimitTypeEnum.ResourceLimitType
}
// file_google_ads_googleads_v8_errors_errors_proto_depIdxs maps every
// cross-type reference in this file's descriptor to an index into the
// goTypes slice above. Per the trailing sentinel entries: indices 0-135
// are the field type_name sub-list, and the method input/output and
// extension sub-lists are all empty ([136:136]).
// NOTE(review): this appears to be protoc-gen-go generated output — do
// not edit by hand; regenerate from the .proto sources instead.
var file_google_ads_googleads_v8_errors_errors_proto_depIdxs = []int32{
	2,   // 0: google.ads.googleads.v8.errors.GoogleAdsFailure.errors:type_name -> google.ads.googleads.v8.errors.GoogleAdsError
	3,   // 1: google.ads.googleads.v8.errors.GoogleAdsError.error_code:type_name -> google.ads.googleads.v8.errors.ErrorCode
	11,  // 2: google.ads.googleads.v8.errors.GoogleAdsError.trigger:type_name -> google.ads.googleads.v8.common.Value
	4,   // 3: google.ads.googleads.v8.errors.GoogleAdsError.location:type_name -> google.ads.googleads.v8.errors.ErrorLocation
	5,   // 4: google.ads.googleads.v8.errors.GoogleAdsError.details:type_name -> google.ads.googleads.v8.errors.ErrorDetails
	12,  // 5: google.ads.googleads.v8.errors.ErrorCode.request_error:type_name -> google.ads.googleads.v8.errors.RequestErrorEnum.RequestError
	13,  // 6: google.ads.googleads.v8.errors.ErrorCode.bidding_strategy_error:type_name -> google.ads.googleads.v8.errors.BiddingStrategyErrorEnum.BiddingStrategyError
	14,  // 7: google.ads.googleads.v8.errors.ErrorCode.url_field_error:type_name -> google.ads.googleads.v8.errors.UrlFieldErrorEnum.UrlFieldError
	15,  // 8: google.ads.googleads.v8.errors.ErrorCode.list_operation_error:type_name -> google.ads.googleads.v8.errors.ListOperationErrorEnum.ListOperationError
	16,  // 9: google.ads.googleads.v8.errors.ErrorCode.query_error:type_name -> google.ads.googleads.v8.errors.QueryErrorEnum.QueryError
	17,  // 10: google.ads.googleads.v8.errors.ErrorCode.mutate_error:type_name -> google.ads.googleads.v8.errors.MutateErrorEnum.MutateError
	18,  // 11: google.ads.googleads.v8.errors.ErrorCode.field_mask_error:type_name -> google.ads.googleads.v8.errors.FieldMaskErrorEnum.FieldMaskError
	19,  // 12: google.ads.googleads.v8.errors.ErrorCode.authorization_error:type_name -> google.ads.googleads.v8.errors.AuthorizationErrorEnum.AuthorizationError
	20,  // 13: google.ads.googleads.v8.errors.ErrorCode.internal_error:type_name -> google.ads.googleads.v8.errors.InternalErrorEnum.InternalError
	21,  // 14: google.ads.googleads.v8.errors.ErrorCode.quota_error:type_name -> google.ads.googleads.v8.errors.QuotaErrorEnum.QuotaError
	22,  // 15: google.ads.googleads.v8.errors.ErrorCode.ad_error:type_name -> google.ads.googleads.v8.errors.AdErrorEnum.AdError
	23,  // 16: google.ads.googleads.v8.errors.ErrorCode.ad_group_error:type_name -> google.ads.googleads.v8.errors.AdGroupErrorEnum.AdGroupError
	24,  // 17: google.ads.googleads.v8.errors.ErrorCode.campaign_budget_error:type_name -> google.ads.googleads.v8.errors.CampaignBudgetErrorEnum.CampaignBudgetError
	25,  // 18: google.ads.googleads.v8.errors.ErrorCode.campaign_error:type_name -> google.ads.googleads.v8.errors.CampaignErrorEnum.CampaignError
	26,  // 19: google.ads.googleads.v8.errors.ErrorCode.authentication_error:type_name -> google.ads.googleads.v8.errors.AuthenticationErrorEnum.AuthenticationError
	27,  // 20: google.ads.googleads.v8.errors.ErrorCode.ad_group_criterion_error:type_name -> google.ads.googleads.v8.errors.AdGroupCriterionErrorEnum.AdGroupCriterionError
	28,  // 21: google.ads.googleads.v8.errors.ErrorCode.ad_customizer_error:type_name -> google.ads.googleads.v8.errors.AdCustomizerErrorEnum.AdCustomizerError
	29,  // 22: google.ads.googleads.v8.errors.ErrorCode.ad_group_ad_error:type_name -> google.ads.googleads.v8.errors.AdGroupAdErrorEnum.AdGroupAdError
	30,  // 23: google.ads.googleads.v8.errors.ErrorCode.ad_sharing_error:type_name -> google.ads.googleads.v8.errors.AdSharingErrorEnum.AdSharingError
	31,  // 24: google.ads.googleads.v8.errors.ErrorCode.adx_error:type_name -> google.ads.googleads.v8.errors.AdxErrorEnum.AdxError
	32,  // 25: google.ads.googleads.v8.errors.ErrorCode.asset_error:type_name -> google.ads.googleads.v8.errors.AssetErrorEnum.AssetError
	33,  // 26: google.ads.googleads.v8.errors.ErrorCode.bidding_error:type_name -> google.ads.googleads.v8.errors.BiddingErrorEnum.BiddingError
	34,  // 27: google.ads.googleads.v8.errors.ErrorCode.campaign_criterion_error:type_name -> google.ads.googleads.v8.errors.CampaignCriterionErrorEnum.CampaignCriterionError
	35,  // 28: google.ads.googleads.v8.errors.ErrorCode.collection_size_error:type_name -> google.ads.googleads.v8.errors.CollectionSizeErrorEnum.CollectionSizeError
	36,  // 29: google.ads.googleads.v8.errors.ErrorCode.country_code_error:type_name -> google.ads.googleads.v8.errors.CountryCodeErrorEnum.CountryCodeError
	37,  // 30: google.ads.googleads.v8.errors.ErrorCode.criterion_error:type_name -> google.ads.googleads.v8.errors.CriterionErrorEnum.CriterionError
	38,  // 31: google.ads.googleads.v8.errors.ErrorCode.customer_error:type_name -> google.ads.googleads.v8.errors.CustomerErrorEnum.CustomerError
	39,  // 32: google.ads.googleads.v8.errors.ErrorCode.date_error:type_name -> google.ads.googleads.v8.errors.DateErrorEnum.DateError
	40,  // 33: google.ads.googleads.v8.errors.ErrorCode.date_range_error:type_name -> google.ads.googleads.v8.errors.DateRangeErrorEnum.DateRangeError
	41,  // 34: google.ads.googleads.v8.errors.ErrorCode.distinct_error:type_name -> google.ads.googleads.v8.errors.DistinctErrorEnum.DistinctError
	42,  // 35: google.ads.googleads.v8.errors.ErrorCode.feed_attribute_reference_error:type_name -> google.ads.googleads.v8.errors.FeedAttributeReferenceErrorEnum.FeedAttributeReferenceError
	43,  // 36: google.ads.googleads.v8.errors.ErrorCode.function_error:type_name -> google.ads.googleads.v8.errors.FunctionErrorEnum.FunctionError
	44,  // 37: google.ads.googleads.v8.errors.ErrorCode.function_parsing_error:type_name -> google.ads.googleads.v8.errors.FunctionParsingErrorEnum.FunctionParsingError
	45,  // 38: google.ads.googleads.v8.errors.ErrorCode.id_error:type_name -> google.ads.googleads.v8.errors.IdErrorEnum.IdError
	46,  // 39: google.ads.googleads.v8.errors.ErrorCode.image_error:type_name -> google.ads.googleads.v8.errors.ImageErrorEnum.ImageError
	47,  // 40: google.ads.googleads.v8.errors.ErrorCode.language_code_error:type_name -> google.ads.googleads.v8.errors.LanguageCodeErrorEnum.LanguageCodeError
	48,  // 41: google.ads.googleads.v8.errors.ErrorCode.media_bundle_error:type_name -> google.ads.googleads.v8.errors.MediaBundleErrorEnum.MediaBundleError
	49,  // 42: google.ads.googleads.v8.errors.ErrorCode.media_upload_error:type_name -> google.ads.googleads.v8.errors.MediaUploadErrorEnum.MediaUploadError
	50,  // 43: google.ads.googleads.v8.errors.ErrorCode.media_file_error:type_name -> google.ads.googleads.v8.errors.MediaFileErrorEnum.MediaFileError
	51,  // 44: google.ads.googleads.v8.errors.ErrorCode.multiplier_error:type_name -> google.ads.googleads.v8.errors.MultiplierErrorEnum.MultiplierError
	52,  // 45: google.ads.googleads.v8.errors.ErrorCode.new_resource_creation_error:type_name -> google.ads.googleads.v8.errors.NewResourceCreationErrorEnum.NewResourceCreationError
	53,  // 46: google.ads.googleads.v8.errors.ErrorCode.not_empty_error:type_name -> google.ads.googleads.v8.errors.NotEmptyErrorEnum.NotEmptyError
	54,  // 47: google.ads.googleads.v8.errors.ErrorCode.null_error:type_name -> google.ads.googleads.v8.errors.NullErrorEnum.NullError
	55,  // 48: google.ads.googleads.v8.errors.ErrorCode.operator_error:type_name -> google.ads.googleads.v8.errors.OperatorErrorEnum.OperatorError
	56,  // 49: google.ads.googleads.v8.errors.ErrorCode.range_error:type_name -> google.ads.googleads.v8.errors.RangeErrorEnum.RangeError
	57,  // 50: google.ads.googleads.v8.errors.ErrorCode.recommendation_error:type_name -> google.ads.googleads.v8.errors.RecommendationErrorEnum.RecommendationError
	58,  // 51: google.ads.googleads.v8.errors.ErrorCode.region_code_error:type_name -> google.ads.googleads.v8.errors.RegionCodeErrorEnum.RegionCodeError
	59,  // 52: google.ads.googleads.v8.errors.ErrorCode.setting_error:type_name -> google.ads.googleads.v8.errors.SettingErrorEnum.SettingError
	60,  // 53: google.ads.googleads.v8.errors.ErrorCode.string_format_error:type_name -> google.ads.googleads.v8.errors.StringFormatErrorEnum.StringFormatError
	61,  // 54: google.ads.googleads.v8.errors.ErrorCode.string_length_error:type_name -> google.ads.googleads.v8.errors.StringLengthErrorEnum.StringLengthError
	62,  // 55: google.ads.googleads.v8.errors.ErrorCode.operation_access_denied_error:type_name -> google.ads.googleads.v8.errors.OperationAccessDeniedErrorEnum.OperationAccessDeniedError
	63,  // 56: google.ads.googleads.v8.errors.ErrorCode.resource_access_denied_error:type_name -> google.ads.googleads.v8.errors.ResourceAccessDeniedErrorEnum.ResourceAccessDeniedError
	64,  // 57: google.ads.googleads.v8.errors.ErrorCode.resource_count_limit_exceeded_error:type_name -> google.ads.googleads.v8.errors.ResourceCountLimitExceededErrorEnum.ResourceCountLimitExceededError
	65,  // 58: google.ads.googleads.v8.errors.ErrorCode.youtube_video_registration_error:type_name -> google.ads.googleads.v8.errors.YoutubeVideoRegistrationErrorEnum.YoutubeVideoRegistrationError
	66,  // 59: google.ads.googleads.v8.errors.ErrorCode.ad_group_bid_modifier_error:type_name -> google.ads.googleads.v8.errors.AdGroupBidModifierErrorEnum.AdGroupBidModifierError
	67,  // 60: google.ads.googleads.v8.errors.ErrorCode.context_error:type_name -> google.ads.googleads.v8.errors.ContextErrorEnum.ContextError
	68,  // 61: google.ads.googleads.v8.errors.ErrorCode.field_error:type_name -> google.ads.googleads.v8.errors.FieldErrorEnum.FieldError
	69,  // 62: google.ads.googleads.v8.errors.ErrorCode.shared_set_error:type_name -> google.ads.googleads.v8.errors.SharedSetErrorEnum.SharedSetError
	70,  // 63: google.ads.googleads.v8.errors.ErrorCode.shared_criterion_error:type_name -> google.ads.googleads.v8.errors.SharedCriterionErrorEnum.SharedCriterionError
	71,  // 64: google.ads.googleads.v8.errors.ErrorCode.campaign_shared_set_error:type_name -> google.ads.googleads.v8.errors.CampaignSharedSetErrorEnum.CampaignSharedSetError
	72,  // 65: google.ads.googleads.v8.errors.ErrorCode.conversion_action_error:type_name -> google.ads.googleads.v8.errors.ConversionActionErrorEnum.ConversionActionError
	73,  // 66: google.ads.googleads.v8.errors.ErrorCode.conversion_adjustment_upload_error:type_name -> google.ads.googleads.v8.errors.ConversionAdjustmentUploadErrorEnum.ConversionAdjustmentUploadError
	74,  // 67: google.ads.googleads.v8.errors.ErrorCode.conversion_custom_variable_error:type_name -> google.ads.googleads.v8.errors.ConversionCustomVariableErrorEnum.ConversionCustomVariableError
	75,  // 68: google.ads.googleads.v8.errors.ErrorCode.conversion_upload_error:type_name -> google.ads.googleads.v8.errors.ConversionUploadErrorEnum.ConversionUploadError
	76,  // 69: google.ads.googleads.v8.errors.ErrorCode.conversion_value_rule_error:type_name -> google.ads.googleads.v8.errors.ConversionValueRuleErrorEnum.ConversionValueRuleError
	77,  // 70: google.ads.googleads.v8.errors.ErrorCode.conversion_value_rule_set_error:type_name -> google.ads.googleads.v8.errors.ConversionValueRuleSetErrorEnum.ConversionValueRuleSetError
	78,  // 71: google.ads.googleads.v8.errors.ErrorCode.header_error:type_name -> google.ads.googleads.v8.errors.HeaderErrorEnum.HeaderError
	79,  // 72: google.ads.googleads.v8.errors.ErrorCode.database_error:type_name -> google.ads.googleads.v8.errors.DatabaseErrorEnum.DatabaseError
	80,  // 73: google.ads.googleads.v8.errors.ErrorCode.policy_finding_error:type_name -> google.ads.googleads.v8.errors.PolicyFindingErrorEnum.PolicyFindingError
	81,  // 74: google.ads.googleads.v8.errors.ErrorCode.enum_error:type_name -> google.ads.googleads.v8.errors.EnumErrorEnum.EnumError
	82,  // 75: google.ads.googleads.v8.errors.ErrorCode.keyword_plan_error:type_name -> google.ads.googleads.v8.errors.KeywordPlanErrorEnum.KeywordPlanError
	83,  // 76: google.ads.googleads.v8.errors.ErrorCode.keyword_plan_campaign_error:type_name -> google.ads.googleads.v8.errors.KeywordPlanCampaignErrorEnum.KeywordPlanCampaignError
	84,  // 77: google.ads.googleads.v8.errors.ErrorCode.keyword_plan_campaign_keyword_error:type_name -> google.ads.googleads.v8.errors.KeywordPlanCampaignKeywordErrorEnum.KeywordPlanCampaignKeywordError
	85,  // 78: google.ads.googleads.v8.errors.ErrorCode.keyword_plan_ad_group_error:type_name -> google.ads.googleads.v8.errors.KeywordPlanAdGroupErrorEnum.KeywordPlanAdGroupError
	86,  // 79: google.ads.googleads.v8.errors.ErrorCode.keyword_plan_ad_group_keyword_error:type_name -> google.ads.googleads.v8.errors.KeywordPlanAdGroupKeywordErrorEnum.KeywordPlanAdGroupKeywordError
	87,  // 80: google.ads.googleads.v8.errors.ErrorCode.keyword_plan_idea_error:type_name -> google.ads.googleads.v8.errors.KeywordPlanIdeaErrorEnum.KeywordPlanIdeaError
	88,  // 81: google.ads.googleads.v8.errors.ErrorCode.account_budget_proposal_error:type_name -> google.ads.googleads.v8.errors.AccountBudgetProposalErrorEnum.AccountBudgetProposalError
	89,  // 82: google.ads.googleads.v8.errors.ErrorCode.user_list_error:type_name -> google.ads.googleads.v8.errors.UserListErrorEnum.UserListError
	90,  // 83: google.ads.googleads.v8.errors.ErrorCode.change_event_error:type_name -> google.ads.googleads.v8.errors.ChangeEventErrorEnum.ChangeEventError
	91,  // 84: google.ads.googleads.v8.errors.ErrorCode.change_status_error:type_name -> google.ads.googleads.v8.errors.ChangeStatusErrorEnum.ChangeStatusError
	92,  // 85: google.ads.googleads.v8.errors.ErrorCode.feed_error:type_name -> google.ads.googleads.v8.errors.FeedErrorEnum.FeedError
	93,  // 86: google.ads.googleads.v8.errors.ErrorCode.geo_target_constant_suggestion_error:type_name -> google.ads.googleads.v8.errors.GeoTargetConstantSuggestionErrorEnum.GeoTargetConstantSuggestionError
	94,  // 87: google.ads.googleads.v8.errors.ErrorCode.campaign_draft_error:type_name -> google.ads.googleads.v8.errors.CampaignDraftErrorEnum.CampaignDraftError
	95,  // 88: google.ads.googleads.v8.errors.ErrorCode.feed_item_error:type_name -> google.ads.googleads.v8.errors.FeedItemErrorEnum.FeedItemError
	96,  // 89: google.ads.googleads.v8.errors.ErrorCode.label_error:type_name -> google.ads.googleads.v8.errors.LabelErrorEnum.LabelError
	97,  // 90: google.ads.googleads.v8.errors.ErrorCode.billing_setup_error:type_name -> google.ads.googleads.v8.errors.BillingSetupErrorEnum.BillingSetupError
	98,  // 91: google.ads.googleads.v8.errors.ErrorCode.customer_client_link_error:type_name -> google.ads.googleads.v8.errors.CustomerClientLinkErrorEnum.CustomerClientLinkError
	99,  // 92: google.ads.googleads.v8.errors.ErrorCode.customer_manager_link_error:type_name -> google.ads.googleads.v8.errors.CustomerManagerLinkErrorEnum.CustomerManagerLinkError
	100, // 93: google.ads.googleads.v8.errors.ErrorCode.feed_mapping_error:type_name -> google.ads.googleads.v8.errors.FeedMappingErrorEnum.FeedMappingError
	101, // 94: google.ads.googleads.v8.errors.ErrorCode.customer_feed_error:type_name -> google.ads.googleads.v8.errors.CustomerFeedErrorEnum.CustomerFeedError
	102, // 95: google.ads.googleads.v8.errors.ErrorCode.ad_group_feed_error:type_name -> google.ads.googleads.v8.errors.AdGroupFeedErrorEnum.AdGroupFeedError
	103, // 96: google.ads.googleads.v8.errors.ErrorCode.campaign_feed_error:type_name -> google.ads.googleads.v8.errors.CampaignFeedErrorEnum.CampaignFeedError
	104, // 97: google.ads.googleads.v8.errors.ErrorCode.custom_interest_error:type_name -> google.ads.googleads.v8.errors.CustomInterestErrorEnum.CustomInterestError
	105, // 98: google.ads.googleads.v8.errors.ErrorCode.campaign_experiment_error:type_name -> google.ads.googleads.v8.errors.CampaignExperimentErrorEnum.CampaignExperimentError
	106, // 99: google.ads.googleads.v8.errors.ErrorCode.extension_feed_item_error:type_name -> google.ads.googleads.v8.errors.ExtensionFeedItemErrorEnum.ExtensionFeedItemError
	107, // 100: google.ads.googleads.v8.errors.ErrorCode.ad_parameter_error:type_name -> google.ads.googleads.v8.errors.AdParameterErrorEnum.AdParameterError
	108, // 101: google.ads.googleads.v8.errors.ErrorCode.feed_item_validation_error:type_name -> google.ads.googleads.v8.errors.FeedItemValidationErrorEnum.FeedItemValidationError
	109, // 102: google.ads.googleads.v8.errors.ErrorCode.extension_setting_error:type_name -> google.ads.googleads.v8.errors.ExtensionSettingErrorEnum.ExtensionSettingError
	110, // 103: google.ads.googleads.v8.errors.ErrorCode.feed_item_set_error:type_name -> google.ads.googleads.v8.errors.FeedItemSetErrorEnum.FeedItemSetError
	111, // 104: google.ads.googleads.v8.errors.ErrorCode.feed_item_set_link_error:type_name -> google.ads.googleads.v8.errors.FeedItemSetLinkErrorEnum.FeedItemSetLinkError
	112, // 105: google.ads.googleads.v8.errors.ErrorCode.feed_item_target_error:type_name -> google.ads.googleads.v8.errors.FeedItemTargetErrorEnum.FeedItemTargetError
	113, // 106: google.ads.googleads.v8.errors.ErrorCode.policy_violation_error:type_name -> google.ads.googleads.v8.errors.PolicyViolationErrorEnum.PolicyViolationError
	114, // 107: google.ads.googleads.v8.errors.ErrorCode.partial_failure_error:type_name -> google.ads.googleads.v8.errors.PartialFailureErrorEnum.PartialFailureError
	115, // 108: google.ads.googleads.v8.errors.ErrorCode.policy_validation_parameter_error:type_name -> google.ads.googleads.v8.errors.PolicyValidationParameterErrorEnum.PolicyValidationParameterError
	116, // 109: google.ads.googleads.v8.errors.ErrorCode.size_limit_error:type_name -> google.ads.googleads.v8.errors.SizeLimitErrorEnum.SizeLimitError
	117, // 110: google.ads.googleads.v8.errors.ErrorCode.offline_user_data_job_error:type_name -> google.ads.googleads.v8.errors.OfflineUserDataJobErrorEnum.OfflineUserDataJobError
	118, // 111: google.ads.googleads.v8.errors.ErrorCode.not_allowlisted_error:type_name -> google.ads.googleads.v8.errors.NotAllowlistedErrorEnum.NotAllowlistedError
	119, // 112: google.ads.googleads.v8.errors.ErrorCode.manager_link_error:type_name -> google.ads.googleads.v8.errors.ManagerLinkErrorEnum.ManagerLinkError
	120, // 113: google.ads.googleads.v8.errors.ErrorCode.currency_code_error:type_name -> google.ads.googleads.v8.errors.CurrencyCodeErrorEnum.CurrencyCodeError
	121, // 114: google.ads.googleads.v8.errors.ErrorCode.access_invitation_error:type_name -> google.ads.googleads.v8.errors.AccessInvitationErrorEnum.AccessInvitationError
	122, // 115: google.ads.googleads.v8.errors.ErrorCode.reach_plan_error:type_name -> google.ads.googleads.v8.errors.ReachPlanErrorEnum.ReachPlanError
	123, // 116: google.ads.googleads.v8.errors.ErrorCode.invoice_error:type_name -> google.ads.googleads.v8.errors.InvoiceErrorEnum.InvoiceError
	124, // 117: google.ads.googleads.v8.errors.ErrorCode.payments_account_error:type_name -> google.ads.googleads.v8.errors.PaymentsAccountErrorEnum.PaymentsAccountError
	125, // 118: google.ads.googleads.v8.errors.ErrorCode.time_zone_error:type_name -> google.ads.googleads.v8.errors.TimeZoneErrorEnum.TimeZoneError
	126, // 119: google.ads.googleads.v8.errors.ErrorCode.asset_link_error:type_name -> google.ads.googleads.v8.errors.AssetLinkErrorEnum.AssetLinkError
	127, // 120: google.ads.googleads.v8.errors.ErrorCode.user_data_error:type_name -> google.ads.googleads.v8.errors.UserDataErrorEnum.UserDataError
	128, // 121: google.ads.googleads.v8.errors.ErrorCode.batch_job_error:type_name -> google.ads.googleads.v8.errors.BatchJobErrorEnum.BatchJobError
	129, // 122: google.ads.googleads.v8.errors.ErrorCode.account_link_error:type_name -> google.ads.googleads.v8.errors.AccountLinkErrorEnum.AccountLinkError
	130, // 123: google.ads.googleads.v8.errors.ErrorCode.third_party_app_analytics_link_error:type_name -> google.ads.googleads.v8.errors.ThirdPartyAppAnalyticsLinkErrorEnum.ThirdPartyAppAnalyticsLinkError
	131, // 124: google.ads.googleads.v8.errors.ErrorCode.customer_user_access_error:type_name -> google.ads.googleads.v8.errors.CustomerUserAccessErrorEnum.CustomerUserAccessError
	132, // 125: google.ads.googleads.v8.errors.ErrorCode.custom_audience_error:type_name -> google.ads.googleads.v8.errors.CustomAudienceErrorEnum.CustomAudienceError
	10,  // 126: google.ads.googleads.v8.errors.ErrorLocation.field_path_elements:type_name -> google.ads.googleads.v8.errors.ErrorLocation.FieldPathElement
	6,   // 127: google.ads.googleads.v8.errors.ErrorDetails.policy_violation_details:type_name -> google.ads.googleads.v8.errors.PolicyViolationDetails
	7,   // 128: google.ads.googleads.v8.errors.ErrorDetails.policy_finding_details:type_name -> google.ads.googleads.v8.errors.PolicyFindingDetails
	8,   // 129: google.ads.googleads.v8.errors.ErrorDetails.quota_error_details:type_name -> google.ads.googleads.v8.errors.QuotaErrorDetails
	9,   // 130: google.ads.googleads.v8.errors.ErrorDetails.resource_count_details:type_name -> google.ads.googleads.v8.errors.ResourceCountDetails
	133, // 131: google.ads.googleads.v8.errors.PolicyViolationDetails.key:type_name -> google.ads.googleads.v8.common.PolicyViolationKey
	134, // 132: google.ads.googleads.v8.errors.PolicyFindingDetails.policy_topic_entries:type_name -> google.ads.googleads.v8.common.PolicyTopicEntry
	0,   // 133: google.ads.googleads.v8.errors.QuotaErrorDetails.rate_scope:type_name -> google.ads.googleads.v8.errors.QuotaErrorDetails.QuotaRateScope
	135, // 134: google.ads.googleads.v8.errors.QuotaErrorDetails.retry_delay:type_name -> google.protobuf.Duration
	136, // 135: google.ads.googleads.v8.errors.ResourceCountDetails.limit_type:type_name -> google.ads.googleads.v8.enums.ResourceLimitTypeEnum.ResourceLimitType
	136, // [136:136] is the sub-list for method output_type
	136, // [136:136] is the sub-list for method input_type
	136, // [136:136] is the sub-list for extension type_name
	136, // [136:136] is the sub-list for extension extendee
	0,   // [0:136] is the sub-list for field type_name
}
// init eagerly registers this file's protobuf descriptor at package load
// time by delegating to the initializer, which returns early once
// File_google_ads_googleads_v8_errors_errors_proto is already set.
func init() { file_google_ads_googleads_v8_errors_errors_proto_init() }
func | () {
if File_google_ads_googleads_v8_errors_errors_proto != nil {
return
}
file_google_ads_googleads_v8_errors_access_invitation_error_proto_init()
file_google_ads_googleads_v8_errors_account_budget_proposal_error_proto_init()
file_google_ads_googleads_v8_errors_account_link_error_proto_init()
file_google_ads_googleads_v8_errors_ad_customizer_error_proto_init()
file_google_ads_googleads_v8_errors_ad_error_proto_init()
file_google_ads_googleads_v8_errors_ad_group_ad_error_proto_init()
file_google_ads_googleads_v8_errors_ad_group_bid_modifier_error_proto_init()
file_google_ads_googleads_v8_errors_ad_group_criterion_error_proto_init()
file_google_ads_googleads_v8_errors_ad_group_error_proto_init()
file_google_ads_googleads_v8_errors_ad_group_feed_error_proto_init()
file_google_ads_googleads_v8_errors_ad_parameter_error_proto_init()
file_google_ads_googleads_v8_errors_ad_sharing_error_proto_init()
file_google_ads_googleads_v8_errors_adx_error_proto_init()
file_google_ads_googleads_v8_errors_asset_error_proto_init()
file_google_ads_googleads_v8_errors_asset_link_error_proto_init()
file_google_ads_googleads_v8_errors_authentication_error_proto_init()
file_google_ads_googleads_v8_errors_authorization_error_proto_init()
file_google_ads_googleads_v8_errors_batch_job_error_proto_init()
file_google_ads_googleads_v8_errors_bidding_error_proto_init()
file_google_ads_googleads_v8_errors_bidding_strategy_error_proto_init()
file_google_ads_googleads_v8_errors_billing_setup_error_proto_init()
file_google_ads_googleads_v8_errors_campaign_budget_error_proto_init()
file_google_ads_googleads_v8_errors_campaign_criterion_error_proto_init()
file_google_ads_googleads_v8_errors_campaign_draft_error_proto_init()
file_google_ads_googleads_v8_errors_campaign_error_proto_init()
file_google_ads_googleads_v8_errors_campaign_experiment_error_proto_init()
file_google_ads_googleads_v8_errors_campaign_feed_error_proto_init()
file_google_ads_googleads_v8_errors_campaign_shared_set_error_proto_init()
file_google_ads_googleads_v8_errors_change_event_error_proto_init()
file_google_ads_googleads_v8_errors_change_status_error_proto_init()
file_google_ads_googleads_v8_errors_collection_size_error_proto_init()
file_google_ads_googleads_v8_errors_context_error_proto_init()
file_google_ads_googleads_v8_errors_conversion_action_error_proto_init()
file_google_ads_googleads_v8_errors_conversion_adjustment_upload_error_proto_init()
file_google_ads_googleads_v8_errors_conversion_custom_variable_error_proto_init()
file_google_ads_googleads_v8_errors_conversion_upload_error_proto_init()
file_google_ads_googleads_v8_errors_conversion_value_rule_error_proto_init()
file_google_ads_googleads_v8_errors_conversion_value_rule_set_error_proto_init()
file_google_ads_googleads_v8_errors_country_code_error_proto_init()
file_google_ads_googleads_v8_errors_criterion_error_proto_init()
file_google_ads_googleads_v8_errors_currency_code_error_proto_init()
file_google_ads_googleads_v8_errors_custom_audience_error_proto_init()
file_google_ads_googleads_v8_errors_custom_interest_error_proto_init()
file_google_ads_googleads_v8_errors_customer_client_link_error_proto_init()
file_google_ads_googleads_v8_errors_customer_error_proto_init()
file_google_ads_googleads_v8_errors_customer_feed_error_proto_init()
file_google_ads_googleads_v8_errors_customer_manager_link_error_proto_init()
file_google_ads_googleads_v8_errors_customer_user_access_error_proto_init()
file_google_ads_googleads_v8_errors_database_error_proto_init()
file_google_ads_googleads_v8_errors_date_error_proto_init()
file_google_ads_googleads_v8_errors_date_range_error_proto_init()
file_google_ads_googleads_v8_errors_distinct_error_proto_init()
file_google_ads_googleads_v8_errors_enum_error_proto_init()
file_google_ads_googleads_v8_errors_extension_feed_item_error_proto_init()
file_google_ads_googleads_v8_errors_extension_setting_error_proto_init()
file_google_ads_googleads_v8_errors_feed_attribute_reference_error_proto_init()
file_google_ads_googleads_v8_errors_feed_error_proto_init()
file_google_ads_googleads_v8_errors_feed_item_error_proto_init()
file_google_ads_googleads_v8_errors_feed_item_set_error_proto_init()
file_google_ads_googleads_v8_errors_feed_item_set_link_error_proto_init()
file_google_ads_googleads_v8_errors_feed_item_target_error_proto_init()
file_google_ads_googleads_v8_errors_feed_item_validation_error_proto_init()
file_google_ads_googleads_v8_errors_feed_mapping_error_proto_init()
file_google_ads_googleads_v8_errors_field_error_proto_init()
file_google_ads_googleads_v8_errors_field_mask_error_proto_init()
file_google_ads_googleads_v8_errors_function_error_proto_init()
file_google_ads_googleads_v8_errors_function_parsing_error_proto_init()
file_google_ads_googleads_v8_errors_geo_target_constant_suggestion_error_proto_init()
file_google_ads_googleads_v8_errors_header_error_proto_init()
file_google_ads_googleads_v8_errors_id_error_proto_init()
file_google_ads_googleads_v8_errors_image_error_proto_init()
file_google_ads_googleads_v8_errors_internal_error_proto_init()
file_google_ads_googleads_v8_errors_invoice_error_proto_init()
file_google_ads_googleads_v8_errors_keyword_plan_ad_group_error_proto_init()
file_google_ads_googleads_v8_errors_keyword_plan_ad_group_keyword_error_proto_init()
file_google_ads_googleads_v8_errors_keyword_plan_campaign_error_proto_init()
file_google_ads_googleads_v8_errors_keyword_plan_campaign_keyword_error_proto_init()
file_google_ads_googleads_v8_errors_keyword_plan_error_proto_init()
file_google_ads_googleads_v8_errors_keyword_plan_idea_error_proto_init()
file_google_ads_googleads_v8_errors_label_error_proto_init()
file_google_ads_googleads_v8_errors_language_code_error_proto_init()
file_google_ads_googleads_v8_errors_list_operation_error_proto_init()
file_google_ads_googleads_v8_errors_manager_link_error_proto_init()
file_google_ads_googleads_v8_errors_media_bundle_error_proto_init()
file_google_ads_googleads_v8_errors_media_file_error_proto_init()
file_google_ads_googleads_v8_errors_media_upload_error_proto_init()
file_google_ads_googleads_v8_errors_multiplier_error_proto_init()
file_google_ads_googleads_v8_errors_mutate_error_proto_init()
file_google_ads_googleads_v8_errors_new_resource_creation_error_proto_init()
file_google_ads_googleads_v8_errors_not_allowlisted_error_proto_init()
file_google_ads_googleads_v8_errors_not_empty_error_proto_init()
file_google_ads_googleads_v8_errors_null_error_proto_init()
file_google_ads_googleads_v8_errors_offline_user_data_job_error_proto_init()
file_google_ads_googleads_v8_errors_operation_access_denied_error_proto_init()
file_google_ads_googleads_v8_errors_operator_error_proto_init()
file_google_ads_googleads_v8_errors_partial_failure_error_proto_init()
file_google_ads_googleads_v8_errors_payments_account_error_proto_init()
file_google_ads_googleads_v8_errors_policy_finding_error_proto_init()
file_google_ads_googleads_v8_errors_policy_validation_parameter_error_proto_init()
file_google_ads_googleads_v8_errors_policy_violation_error_proto_init()
file_google_ads_googleads_v8_errors_query_error_proto_init()
file_google_ads_googleads_v8_errors_quota_error_proto_init()
file_google_ads_googleads_v8_errors_range_error_proto_init()
file_google_ads_googleads_v8_errors_reach_plan_error_proto_init()
file_google_ads_googleads_v8_errors_recommendation_error_proto_init()
file_google_ads_googleads_v8_errors_region_code_error_proto_init()
file_google_ads_googleads_v8_errors_request_error_proto_init()
file_google_ads_googleads_v8_errors_resource_access_denied_error_proto_init()
file_google_ads_googleads_v8_errors_resource_count_limit_exceeded_error_proto_init()
file_google_ads_googleads_v8_errors_setting_error_proto_init()
file_google_ads_googleads_v8_errors_shared_criterion_error_proto_init()
file_google_ads_googleads_v8_errors_shared_set_error_proto_init()
file_google_ads_googleads_v8_errors_size_limit_error_proto_init()
file_google_ads_googleads_v8_errors_string_format_error_proto_init()
file_google_ads_googleads_v8_errors_string_length_error_proto_init()
file_google_ads_googleads_v8_errors_third_party_app_analytics_link_error_proto_init()
file_google_ads_googleads_v8_errors_time_zone_error_proto_init()
file_google_ads_googleads_v8_errors_url_field_error_proto_init()
file_google_ads_googleads_v8_errors_user_data_error_proto_init()
file_google_ads_googleads_v8_errors_user_list_error_proto_init()
file_google_ads_googleads_v8_errors_youtube_video_registration_error_proto_init()
if !protoimpl.UnsafeEnabled {
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GoogleAdsFailure); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GoogleAdsError); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ErrorCode); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ErrorLocation); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ErrorDetails); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*PolicyViolationDetails); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*PolicyFindingDetails); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*QuotaErrorDetails); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ResourceCountDetails); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ErrorLocation_FieldPathElement); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[2].OneofWrappers = []interface{}{
(*ErrorCode_RequestError)(nil),
(*ErrorCode_BiddingStrategyError)(nil),
(*ErrorCode_UrlFieldError)(nil),
(*ErrorCode_ListOperationError)(nil),
(*ErrorCode_QueryError)(nil),
(*ErrorCode_MutateError)(nil),
(*ErrorCode_FieldMaskError)(nil),
(*ErrorCode_AuthorizationError)(nil),
(*ErrorCode_InternalError)(nil),
(*ErrorCode_QuotaError)(nil),
(*ErrorCode_AdError)(nil),
(*ErrorCode_AdGroupError)(nil),
(*ErrorCode_CampaignBudgetError)(nil),
(*ErrorCode_CampaignError)(nil),
(*ErrorCode_AuthenticationError)(nil),
(*ErrorCode_AdGroupCriterionError)(nil),
(*ErrorCode_AdCustomizerError)(nil),
(*ErrorCode_AdGroupAdError)(nil),
(*ErrorCode_AdSharingError)(nil),
(*ErrorCode_AdxError)(nil),
(*ErrorCode_AssetError)(nil),
(*ErrorCode_BiddingError)(nil),
(*ErrorCode_CampaignCriterionError)(nil),
(*ErrorCode_CollectionSizeError)(nil),
(*ErrorCode_CountryCodeError)(nil),
(*ErrorCode_CriterionError)(nil),
(*ErrorCode_CustomerError)(nil),
(*ErrorCode_DateError)(nil),
(*ErrorCode_DateRangeError)(nil),
(*ErrorCode_DistinctError)(nil),
(*ErrorCode_FeedAttributeReferenceError)(nil),
(*ErrorCode_FunctionError)(nil),
(*ErrorCode_FunctionParsingError)(nil),
(*ErrorCode_IdError)(nil),
(*ErrorCode_ImageError)(nil),
(*ErrorCode_LanguageCodeError)(nil),
(*ErrorCode_MediaBundleError)(nil),
(*ErrorCode_MediaUploadError)(nil),
(*ErrorCode_MediaFileError)(nil),
(*ErrorCode_MultiplierError)(nil),
(*ErrorCode_NewResourceCreationError)(nil),
(*ErrorCode_NotEmptyError)(nil),
(*ErrorCode_NullError)(nil),
(*ErrorCode_OperatorError)(nil),
(*ErrorCode_RangeError)(nil),
(*ErrorCode_RecommendationError)(nil),
(*ErrorCode_RegionCodeError)(nil),
(*ErrorCode_SettingError)(nil),
(*ErrorCode_StringFormatError)(nil),
(*ErrorCode_StringLengthError)(nil),
(*ErrorCode_OperationAccessDeniedError)(nil),
(*ErrorCode_ResourceAccessDeniedError)(nil),
(*ErrorCode_ResourceCountLimitExceededError)(nil),
(*ErrorCode_YoutubeVideoRegistrationError)(nil),
(*ErrorCode_AdGroupBidModifierError)(nil),
(*ErrorCode_ContextError)(nil),
(*ErrorCode_FieldError)(nil),
(*ErrorCode_SharedSetError)(nil),
(*ErrorCode_SharedCriterionError)(nil),
(*ErrorCode_CampaignSharedSetError)(nil),
(*ErrorCode_ConversionActionError)(nil),
(*ErrorCode_ConversionAdjustmentUploadError)(nil),
(*ErrorCode_ConversionCustomVariableError)(nil),
(*ErrorCode_ConversionUploadError)(nil),
(*ErrorCode_ConversionValueRuleError)(nil),
(*ErrorCode_ConversionValueRuleSetError)(nil),
(*ErrorCode_HeaderError)(nil),
(*ErrorCode_DatabaseError)(nil),
(*ErrorCode_PolicyFindingError)(nil),
(*ErrorCode_EnumError)(nil),
(*ErrorCode_KeywordPlanError)(nil),
(*ErrorCode_KeywordPlanCampaignError)(nil),
(*ErrorCode_KeywordPlanCampaignKeywordError)(nil),
(*ErrorCode_KeywordPlanAdGroupError)(nil),
(*ErrorCode_KeywordPlanAdGroupKeywordError)(nil),
(*ErrorCode_KeywordPlanIdeaError)(nil),
(*ErrorCode_AccountBudgetProposalError)(nil),
(*ErrorCode_UserListError)(nil),
(*ErrorCode_ChangeEventError)(nil),
(*ErrorCode_ChangeStatusError)(nil),
(*ErrorCode_FeedError)(nil),
(*ErrorCode_GeoTargetConstantSuggestionError)(nil),
(*ErrorCode_CampaignDraftError)(nil),
(*ErrorCode_FeedItemError)(nil),
(*ErrorCode_LabelError)(nil),
(*ErrorCode_BillingSetupError)(nil),
(*ErrorCode_CustomerClientLinkError)(nil),
(*ErrorCode_CustomerManagerLinkError)(nil),
(*ErrorCode_FeedMappingError)(nil),
(*ErrorCode_CustomerFeedError)(nil),
(*ErrorCode_AdGroupFeedError)(nil),
(*ErrorCode_CampaignFeedError)(nil),
(*ErrorCode_CustomInterestError)(nil),
(*ErrorCode_CampaignExperimentError)(nil),
(*ErrorCode_ExtensionFeedItemError)(nil),
(*ErrorCode_AdParameterError)(nil),
(*ErrorCode_FeedItemValidationError)(nil),
(*ErrorCode_ExtensionSettingError)(nil),
(*ErrorCode_FeedItemSetError)(nil),
(*ErrorCode_FeedItemSetLinkError)(nil),
(*ErrorCode_FeedItemTargetError)(nil),
(*ErrorCode_PolicyViolationError)(nil),
(*ErrorCode_PartialFailureError)(nil),
(*ErrorCode_PolicyValidationParameterError)(nil),
(*ErrorCode_SizeLimitError)(nil),
(*ErrorCode_OfflineUserDataJobError)(nil),
(*ErrorCode_NotAllowlistedError)(nil),
(*ErrorCode_ManagerLinkError)(nil),
(*ErrorCode_CurrencyCodeError)(nil),
(*ErrorCode_AccessInvitationError)(nil),
(*ErrorCode_ReachPlanError)(nil),
(*ErrorCode_InvoiceError)(nil),
(*ErrorCode_PaymentsAccountError)(nil),
(*ErrorCode_TimeZoneError)(nil),
(*ErrorCode_AssetLinkError)(nil),
(*ErrorCode_UserDataError)(nil),
(*ErrorCode_BatchJobError)(nil),
(*ErrorCode_AccountLinkError)(nil),
(*ErrorCode_ThirdPartyAppAnalyticsLinkError)(nil),
(*ErrorCode_CustomerUserAccessError)(nil),
(*ErrorCode_CustomAudienceError)(nil),
}
file_google_ads_googleads_v8_errors_errors_proto_msgTypes[9].OneofWrappers = []interface{}{}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_google_ads_googleads_v8_errors_errors_proto_rawDesc,
NumEnums: 1,
NumMessages: 10,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_google_ads_googleads_v8_errors_errors_proto_goTypes,
DependencyIndexes: file_google_ads_googleads_v8_errors_errors_proto_depIdxs,
EnumInfos: file_google_ads_googleads_v8_errors_errors_proto_enumTypes,
MessageInfos: file_google_ads_googleads_v8_errors_errors_proto_msgTypes,
}.Build()
File_google_ads_googleads_v8_errors_errors_proto = out.File
file_google_ads_googleads_v8_errors_errors_proto_rawDesc = nil
file_google_ads_googleads_v8_errors_errors_proto_goTypes = nil
file_google_ads_googleads_v8_errors_errors_proto_depIdxs = nil
}
| file_google_ads_googleads_v8_errors_errors_proto_init |
image_link_optimizer_test.py | # coding=utf-8
# Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for image_link_optimizer.py."""
import json
import time
from typing import Any, Dict, Iterable, List, Sequence
from unittest import mock
import urllib.error

from absl.testing import absltest
import flask

from shoptimizer_api import constants
from shoptimizer_api.optimizers_builtin import image_link_optimizer
from shoptimizer_api.test_data import requests_bodies
from shoptimizer_api.util import app_util
from shoptimizer_api.util import image_util
from shoptimizer_api.util import networking
def _build_list_of_image_links(num_links: int,
file_type: str = 'jpg') -> List[str]:
return [f'https://examples.com/image{n}.{file_type}'
for n in list(range(num_links))]
def _request_body_from_image_links(links: Sequence[str]) -> Dict[str, Any]:
  """Builds a Shoptimizer request body from a sequence of image links.

  The first link becomes the product's imageLink and the remaining links
  become its additionalImageLink entries.

  Note: the parameter was previously annotated Iterable[str], but the body
  subscripts and slices it (links[0], links[1:]), so it must be a Sequence.

  Args:
    links: Image URLs; must contain at least one element.

  Returns:
    A request body dict suitable for an optimizer's process() method.
  """
  return requests_bodies.build_request_body(properties_to_be_updated={
      'imageLink': links[0],
      'additionalImageLink': links[1:]
  })
def _setup_flask_with_configs_only():
  """Creates a Flask app with loaded configs and pushes its app context.

  Pushing an application context lets code under test read the loaded
  'CONFIGS' entry from the current app during the test run.
  """
  test_app = flask.Flask(__name__)
  test_app.config['CONFIGS'] = app_util._load_all_configs()
  test_app.app_context().push()
@mock.patch.object(image_link_optimizer, '_CONFIG_FILE_NAME',
new='image_link_optimizer_config_test')
class ImageLinkOptimizerTest(absltest.TestCase):
  def setUp(self):
    """Creates a Flask app context, default mocks, and a default optimizer."""
    super().setUp()
    _setup_flask_with_configs_only()
    # By default, mock load_bytes_at_url to return empty bytes.  Individual
    # tests override this via their own mock.patch.object where needed.
    # enter_context registers the patch for automatic cleanup at teardown.
    self.mock_urlopen = self.enter_context(
        mock.patch.object(networking, 'load_bytes_at_url', return_value=b'',
                          autospec=True))
    # By default, mock the ML model to avoid scoring each image
    self.mock_model = self.enter_context(
        mock.patch.object(image_util, 'score_image', return_value=float('inf'),
                          autospec=True))
    # Optimizer built with library defaults; tests needing other settings
    # construct their own ImageLinkOptimizer instance.
    self.optimizer = image_link_optimizer.ImageLinkOptimizer(
        image_link_optimizer.CONFIGURATION_DEFAULTS)
def test_config_uses_defaults_if_no_config_file_or_assignment(self):
with mock.patch.object(image_link_optimizer, '_CONFIG_FILE_NAME', 'file'):
optimizer = image_link_optimizer.ImageLinkOptimizer()
self.assertEqual(
image_link_optimizer
.CONFIGURATION_DEFAULTS['require_image_can_be_downloaded'],
optimizer.require_image_can_be_downloaded)
self.assertEqual(
image_link_optimizer
.CONFIGURATION_DEFAULTS['require_image_score_quality_better_than'],
optimizer.require_image_score_quality_better_than)
def test_config_uses_config_file_if_no_assignment(self):
with open(f'shoptimizer_api/config/{image_link_optimizer._CONFIG_FILE_NAME}.json') as f:
file_config = json.load(f)
optimizer = image_link_optimizer.ImageLinkOptimizer()
self.assertEqual(
file_config['require_image_can_be_downloaded'],
optimizer.require_image_can_be_downloaded)
self.assertEqual(
file_config['require_image_score_quality_better_than'],
optimizer.require_image_score_quality_better_than)
def test_config_uses_assignment_if_available(self):
assignments = {
'require_image_can_be_downloaded': True,
'require_image_score_quality_better_than': float('inf')
}
optimizer = image_link_optimizer.ImageLinkOptimizer(assignments)
self.assertEqual(
assignments['require_image_can_be_downloaded'],
optimizer.require_image_can_be_downloaded)
self.assertEqual(
assignments['require_image_score_quality_better_than'],
optimizer.require_image_score_quality_better_than)
def test_negative_require_image_score_quality_better_than_set_to_zero(self):
optimizer = image_link_optimizer.ImageLinkOptimizer({
'require_image_score_quality_better_than': -1
})
self.assertEqual(0, optimizer.require_image_score_quality_better_than)
def test_raises_if_invalid_require_image_score_quality_better_than(self):
with self.assertRaises(ValueError):
image_link_optimizer.ImageLinkOptimizer({
'require_image_score_quality_better_than': 'some string'
})
def test_optimizer_does_nothing_when_alternate_image_links_missing(self):
original_data = requests_bodies.build_request_body(
properties_to_be_removed=['additionalImageLink'])
optimized_data, optimization_result = self.optimizer.process(original_data)
product = optimized_data['entries'][0]['product']
self.assertNotIn('additionalImageLink', product)
self.assertEqual(0, optimization_result.num_of_products_optimized)
def test_optimizer_does_nothing_when_alternate_image_links_valid(self):
image_links = _build_list_of_image_links(3)
original_data = requests_bodies.build_request_body(
properties_to_be_updated={'additionalImageLink': image_links})
optimized_data, optimization_result = self.optimizer.process(original_data)
product = optimized_data['entries'][0]['product']
self.assertEqual(image_links, product['additionalImageLink'])
self.assertEqual(0, optimization_result.num_of_products_optimized)
def test_optimizer_does_not_remove_image_links_when_not_above_maximum(self):
image_links = _build_list_of_image_links(constants.MAX_ALTERNATE_IMAGE_URLS)
original_data = requests_bodies.build_request_body(
properties_to_be_updated={'additionalImageLink': image_links})
optimized_data, optimization_result = self.optimizer.process(original_data)
product = optimized_data['entries'][0]['product']
self.assertEqual(image_links, product['additionalImageLink'])
self.assertEqual(0, optimization_result.num_of_products_optimized)
def test_optimizer_truncates_additional_images_above_maximum(self):
image_links = _build_list_of_image_links(
constants.MAX_ALTERNATE_IMAGE_URLS + 1)
original_data = requests_bodies.build_request_body(
properties_to_be_updated={'additionalImageLink': image_links})
optimized_data, optimization_result = self.optimizer.process(original_data)
product = optimized_data['entries'][0]['product']
self.assertEqual(image_links[:constants.MAX_ALTERNATE_IMAGE_URLS],
product['additionalImageLink'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_optimizer_requests_data_from_all_image_urls(self):
image_links = _build_list_of_image_links(3)
self.optimizer.process(_request_body_from_image_links(image_links))
self.mock_urlopen.assert_has_calls(
[mock.call(image_links[0]),
mock.call(image_links[1]),
mock.call(image_links[2])],
any_order=True)
def test_doesnt_download_urls_if_not_require_image_can_be_downloaded(self):
image_links = _build_list_of_image_links(3)
optimizer = image_link_optimizer.ImageLinkOptimizer({
'require_image_can_be_downloaded': False
})
optimizer.process(_request_body_from_image_links(image_links))
self.mock_urlopen.assert_not_called()
def | (self):
image_links = _build_list_of_image_links(3)
optimizer = image_link_optimizer.ImageLinkOptimizer({
'require_image_can_be_downloaded': False
})
optimizer.process(_request_body_from_image_links(image_links))
self.mock_model.assert_not_called()
def test_optimizer_does_not_request_from_nonhttp_urls(self):
image_links = _build_list_of_image_links(2)
image_links[0] = 'ftp://google.com/image.jpg'
self.optimizer.process(_request_body_from_image_links(image_links))
self.assertNotIn(
mock.call(image_links[0]), self.mock_urlopen.call_args_list)
def test_optimizer_does_not_request_from_long_urls(self):
image_links = _build_list_of_image_links(2)
many_zeros = '0' * constants.MAX_IMAGE_URL_LENGTH
image_links[0] = f'https://google.com/image{many_zeros}.jpg'
self.optimizer.process(_request_body_from_image_links(image_links))
self.assertNotIn(
mock.call(image_links[0]), self.mock_urlopen.call_args_list)
def test_does_not_remove_additional_images_with_errors_below_max(self):
image_links = _build_list_of_image_links(3)
responses = [b''] * len(image_links)
responses[1] = urllib.error.HTTPError(image_links[1], 500, 'Internal Error',
{}, None)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
optimized_data, optimization_result = self.optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
self.assertEqual(image_links[0], product['imageLink'])
self.assertEqual(image_links[1:], product['additionalImageLink'])
self.assertEqual(0, optimization_result.num_of_products_optimized)
def test_scores_all_valid_images(self):
image_links = _build_list_of_image_links(3)
responses = bytearray('ABCDEF', 'ASCII')
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
self.optimizer.process(_request_body_from_image_links(image_links))
self.mock_model.assert_has_calls([
mock.call(responses[0]),
mock.call(responses[1]),
mock.call(responses[2])
], any_order=True)
def test_does_not_score_images_with_no_content(self):
image_links = _build_list_of_image_links(3)
responses = [b''] * len(image_links)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
self.optimizer.process(_request_body_from_image_links(image_links))
self.mock_model.assert_not_called()
def test_does_not_score_images_if_minimum_score_is_infinite(self):
image_links = _build_list_of_image_links(3)
assignments = {
'require_image_can_be_downloaded': True,
'require_image_score_quality_better_than': float('inf')
}
optimizer = image_link_optimizer.ImageLinkOptimizer(assignments)
responses = bytearray('ABCDEF', 'ASCII')
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
optimizer.process(_request_body_from_image_links(image_links))
self.mock_model.assert_not_called()
def test_does_not_score_images_with_url_errors(self):
image_links = _build_list_of_image_links(3)
responses = [urllib.error.HTTPError(link, 500, 'Internal Error', {}, None)
for link in image_links]
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
self.optimizer.process(_request_body_from_image_links(image_links))
self.mock_model.assert_not_called()
def test_preferentially_removes_images_with_invalid_urls(self):
image_links = _build_list_of_image_links(
constants.MAX_ALTERNATE_IMAGE_URLS + 2)
image_links[1] = 'ftp://google.com/image.jpg'
responses = [b''] * len(image_links)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
optimized_data, optimization_result = self.optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
# Expect to remove the 1st additional image link
expected_links = image_links[2:]
self.assertEqual(image_links[0], product['imageLink'])
self.assertEqual(expected_links, product['additionalImageLink'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_preferentially_removes_images_above_size_limit(self):
image_links = _build_list_of_image_links(
constants.MAX_ALTERNATE_IMAGE_URLS + 2)
responses = [b''] * len(image_links)
responses[1] = b'0' * (constants.MAX_IMAGE_FILE_SIZE_BYTES + 1)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
optimized_data, optimization_result = self.optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
# Expect to remove the 1st additional image link
expected_links = image_links[2:]
self.assertEqual(image_links[0], product['imageLink'])
self.assertEqual(expected_links, product['additionalImageLink'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_preferentially_removes_images_with_errors_above_max(self):
image_links = _build_list_of_image_links(13)
responses = [b''] * len(image_links)
responses[4] = urllib.error.HTTPError(image_links[4], 500,
'Internal Error', {}, None)
responses[8] = urllib.error.HTTPError(image_links[8], 500,
'Internal Error', {}, None)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
optimized_data, optimization_result = self.optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
# Expect to remove the 4th and 8th image due to errors
expected_links = image_links[1:4] + image_links[5:8] + image_links[9:]
self.assertEqual(image_links[0], product['imageLink'])
self.assertEqual(expected_links, product['additionalImageLink'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_first_removes_errors_above_max_then_truncates_at_max(self):
image_links = _build_list_of_image_links(13)
responses = [b''] * len(image_links)
responses[4] = urllib.error.HTTPError(image_links[1], 500,
'Internal Error', {}, None)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
optimized_data, optimization_result = self.optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
# Expect to remove the 4th image due to error and the last from truncation
expected_links = image_links[1:4] + image_links[5:-1]
self.assertEqual(image_links[0], product['imageLink'])
self.assertEqual(expected_links, product['additionalImageLink'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_swaps_on_primary_image_error_with_alternate_available(self):
image_links = _build_list_of_image_links(3)
responses = [b''] * len(image_links)
responses[0] = urllib.error.HTTPError(image_links[0], 500,
'Internal Error', {}, None)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
optimized_data, optimization_result = self.optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
self.assertEqual(image_links[1], product['imageLink'])
expected_links = [image_links[0]] + image_links[2:]
self.assertEqual(expected_links, product['additionalImageLink'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_swaps_on_primary_image_error_with_any_alternate_available(self):
image_links = _build_list_of_image_links(3)
responses = [b''] * len(image_links)
responses[0] = urllib.error.HTTPError(image_links[0], 500,
'Internal Error', {}, None)
responses[1] = urllib.error.HTTPError(image_links[1], 500,
'Internal Error', {}, None)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
optimized_data, optimization_result = self.optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
self.assertEqual(image_links[2], product['imageLink'])
# Ensure imageLink swapped with 2nd alternate, since the 1st is an error
expected_links = [image_links[1], image_links[0]]
self.assertEqual(expected_links, product['additionalImageLink'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_preferentially_chooses_lowest_scoring_image(self):
image_links = _build_list_of_image_links(5)
image_responses = [b'101010'] * len(image_links)
image_responses[0] = urllib.error.HTTPError(image_links[0], 500,
'Internal Error', {}, None)
score_responses = [0.75, 0.5, 0.25, 1.0]
with mock.patch.object(networking, 'load_bytes_at_url') as mock_network:
mock_network.side_effect = image_responses
with mock.patch.object(image_util, 'score_image') as mock_model:
mock_model.side_effect = score_responses
optimized_data, optimization_result = self.optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
# Ensure imageLink swapped with 3rd alternate; that has the lowest score
self.assertEqual(image_links[3], product['imageLink'])
expected_links = [image_links[1], image_links[2],
image_links[0], image_links[4]]
self.assertEqual(expected_links, product['additionalImageLink'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_images_scoring_below_threshold_are_considered_invalid(self):
image_links = _build_list_of_image_links(3)
image_responses = [b'101010'] * len(image_links)
score_responses = [0.75, 0.25, 1.0]
assignments = {
'require_image_can_be_downloaded': True,
'require_image_score_quality_better_than': 0.5
}
optimizer = image_link_optimizer.ImageLinkOptimizer(assignments)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_network:
mock_network.side_effect = image_responses
with mock.patch.object(image_util, 'score_image') as mock_model:
mock_model.side_effect = score_responses
optimized_data, optimization_result = optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
# Ensure imageLink swapped with 1st alternate; that has the lowest score
self.assertEqual(image_links[1], product['imageLink'])
expected_links = [image_links[0], image_links[2]]
self.assertEqual(expected_links, product['additionalImageLink'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_do_not_swap_images_if_better_alternates_score_below_threshold(self):
image_links = _build_list_of_image_links(3)
image_responses = [b'101010'] * len(image_links)
score_responses = [0.75, 0.6, 0.7]
assignments = {
'require_image_can_be_downloaded': True,
'require_image_score_quality_better_than': 0.5
}
optimizer = image_link_optimizer.ImageLinkOptimizer(assignments)
with mock.patch.object(networking, 'load_bytes_at_url') as mock_network:
mock_network.side_effect = image_responses
with mock.patch.object(image_util, 'score_image') as mock_model:
mock_model.side_effect = score_responses
optimized_data, optimization_result = optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
self.assertEqual(image_links[0], product['imageLink'])
self.assertEqual(image_links[1:], product['additionalImageLink'])
self.assertEqual(0, optimization_result.num_of_products_optimized)
def test_does_not_swap_on_primary_image_error_if_no_alternate_available(self):
image_links = _build_list_of_image_links(3)
responses = [urllib.error.HTTPError(link, 500, 'Internal Error', {}, None)
for link in image_links]
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = responses
optimized_data, optimization_result = self.optimizer.process(
_request_body_from_image_links(image_links))
product = optimized_data['entries'][0]['product']
self.assertEqual(image_links[0], product['imageLink'])
self.assertEqual(image_links[1:], product['additionalImageLink'])
self.assertEqual(0, optimization_result.num_of_products_optimized)
def test_downloads_images_in_parallel(self):
sleep_amount_secs = 0.25
image_links = _build_list_of_image_links(3)
def _wait_before_responding(*_args):
time.sleep(sleep_amount_secs)
return b''
with mock.patch.object(networking, 'load_bytes_at_url') as mock_request:
mock_request.side_effect = _wait_before_responding
start_time = time.time()
self.optimizer.process(_request_body_from_image_links(image_links))
end_time = time.time()
# Elapsed time < sum of the sleep times iff requests are in parallel
self.assertLess(end_time - start_time,
len(image_links) * sleep_amount_secs)
| test_doesnt_attempt_scoring_if_not_require_image_can_be_downloaded |
team_item_request_builder.go | package item
import (
ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9 "github.com/microsoft/kiota/abstractions/go"
i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87 "github.com/microsoftgraph/msgraph-sdk-go/models/microsoft/graph"
i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b "github.com/microsoftgraph/msgraph-sdk-go/models/microsoft/graph/odataerrors"
i5363721350f81d4226008b2a0abc5fa76fabff34763dc7de5066c621e0c0f6be "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/operations"
i539671039e993ec9795209a4951310067d75618d6c4524b564660e6d7f6dec35 "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/template"
i6ea3db8c980a5b6b549dcba6aaa6b387b8c6b1cb073e6178a10627e6fb06a389 "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/primarychannel"
i81bd7dd61df3d95dbc4620ad1479451afe1469239e95f230e4f09a46b785da75 "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/schedule"
ib1deb2599b9a847d5026bfa2b1b44e59f2a829010f536668074417c636ce36db "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/channels"
ic44d4f0e5acd07f5481d8f8b84a79cab11dfbee059cf6770fd4e2b23db0c1a93 "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/group"
ic914f26c893a65c2d7a0c81250eacc4c0f9d860f64ab1b1cf1c364c6743699a8 "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/installedapps"
ief730b7100e12b3385f77770a9d755bebd575023d52fdbb9414e8c86230b027d "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/members"
i6d038dcd104603d7281b4eac363006d79b67e92eca3e8c364b49d02b84649d79 "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/channels/item"
i98b219e6f0357bddb879690365c172b8f5fa9226021c11592a8062b6d8a9415e "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/members/item"
ie5af489748d2fc36f15958b988ad08901566118f084112092561d1d1b446b768 "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/installedapps/item"
ie78978d46a3704a561b7ecbd9e66a5ee9be50b99960e26229ec758c80ee98271 "github.com/microsoftgraph/msgraph-sdk-go/users/item/joinedteams/item/operations/item"
)
// TeamItemRequestBuilder provides operations to manage the joinedTeams property of the microsoft.graph.user entity.
// Instances are created via NewTeamItemRequestBuilder / NewTeamItemRequestBuilderInternal.
type TeamItemRequestBuilder struct {
    // Path parameters for the request (e.g. user_id, team_id substituted into urlTemplate)
    pathParameters map[string]string;
    // The request adapter to use to execute the requests.
    requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter;
    // Url template to use to build the URL for the current request builder
    urlTemplate string;
}
// TeamItemRequestBuilderDeleteOptions options for Delete
type TeamItemRequestBuilderDeleteOptions struct {
    // Request headers to send with the request
    H map[string]string;
    // Request options (middleware configuration) for the request
    O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
    // Response handler to use in place of the default response handling provided by the core service
    ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// TeamItemRequestBuilderGetOptions options for Get
type TeamItemRequestBuilderGetOptions struct {
    // Request headers to send with the request
    H map[string]string;
    // Request options (middleware configuration) for the request
    O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
    // Request query parameters ($select / $expand)
    Q *TeamItemRequestBuilderGetQueryParameters;
    // Response handler to use in place of the default response handling provided by the core service
    ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// TeamItemRequestBuilderGetQueryParameters the Microsoft Teams teams that the user is a member of. Read-only. Nullable.
type TeamItemRequestBuilderGetQueryParameters struct {
    // Expand related entities
    Expand []string;
    // Select properties to be returned
    Select []string;
}
// TeamItemRequestBuilderPatchOptions options for Patch
type TeamItemRequestBuilderPatchOptions struct {
//
Body i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.Teamable;
// Request headers
H map[string]string; | // Response handler to use in place of the default response handling provided by the core service
ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// Channels returns a request builder for the channels collection of this team,
// reusing the current builder's path parameters and request adapter.
func (m *TeamItemRequestBuilder) Channels()(*ib1deb2599b9a847d5026bfa2b1b44e59f2a829010f536668074417c636ce36db.ChannelsRequestBuilder) {
    return ib1deb2599b9a847d5026bfa2b1b44e59f2a829010f536668074417c636ce36db.NewChannelsRequestBuilderInternal(m.pathParameters, m.requestAdapter);
}
// ChannelsById returns a request builder addressing a single channel of this
// team, identified by id (mapped to the channel_id path parameter).
func (m *TeamItemRequestBuilder) ChannelsById(id string)(*i6d038dcd104603d7281b4eac363006d79b67e92eca3e8c364b49d02b84649d79.ChannelItemRequestBuilder) {
    // Copy the current path parameters so the child builder owns its own map.
    params := make(map[string]string, len(m.pathParameters)+1)
    for key, value := range m.pathParameters {
        params[key] = value
    }
    // An empty id is skipped, leaving channel_id unset as in the original.
    if id != "" {
        params["channel_id"] = id
    }
    return i6d038dcd104603d7281b4eac363006d79b67e92eca3e8c364b49d02b84649d79.NewChannelItemRequestBuilderInternal(params, m.requestAdapter)
}
// NewTeamItemRequestBuilderInternal instantiates a new TeamItemRequestBuilder and sets the default values.
// The supplied path parameters are copied so the builder owns its own map.
func NewTeamItemRequestBuilderInternal(pathParameters map[string]string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*TeamItemRequestBuilder) {
    params := make(map[string]string, len(pathParameters))
    for key, value := range pathParameters {
        params[key] = value
    }
    return &TeamItemRequestBuilder{
        // URL template expanded with user_id / team_id plus optional $select/$expand.
        urlTemplate:    "{+baseurl}/users/{user_id}/joinedTeams/{team_id}{?select,expand}",
        pathParameters: params,
        requestAdapter: requestAdapter,
    }
}
// NewTeamItemRequestBuilder instantiates a new TeamItemRequestBuilder and sets the default values.
// The raw URL is stored under the conventional "request-raw-url" path parameter.
func NewTeamItemRequestBuilder(rawUrl string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*TeamItemRequestBuilder) {
    return NewTeamItemRequestBuilderInternal(
        map[string]string{"request-raw-url": rawUrl},
        requestAdapter,
    )
}
// CreateDeleteRequestInformation builds the DELETE request for the joinedTeams
// navigation property. Headers and request options from the (optional) options
// struct are applied to the returned RequestInformation.
func (m *TeamItemRequestBuilder) CreateDeleteRequestInformation(options *TeamItemRequestBuilderDeleteOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
    info := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
    info.UrlTemplate = m.urlTemplate
    info.PathParameters = m.pathParameters
    info.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.DELETE
    if options == nil {
        return info, nil
    }
    if options.H != nil {
        info.Headers = options.H
    }
    if len(options.O) != 0 {
        if err := info.AddRequestOptions(options.O...); err != nil {
            return nil, err
        }
    }
    return info, nil
}
// CreateGetRequestInformation the Microsoft Teams teams that the user is a member of. Read-only. Nullable.
func (m *TeamItemRequestBuilder) CreateGetRequestInformation(options *TeamItemRequestBuilderGetOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.GET
if options != nil && options.Q != nil {
requestInfo.AddQueryParameters(*(options.Q))
}
if options != nil && options.H != nil {
requestInfo.Headers = options.H
}
if options != nil && len(options.O) != 0 {
err := requestInfo.AddRequestOptions(options.O...)
if err != nil {
return nil, err
}
}
return requestInfo, nil
}
// CreatePatchRequestInformation update the navigation property joinedTeams in users
func (m *TeamItemRequestBuilder) CreatePatchRequestInformation(options *TeamItemRequestBuilderPatchOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
    requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
    requestInfo.UrlTemplate = m.urlTemplate
    requestInfo.PathParameters = m.pathParameters
    requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.PATCH
    // Fix: guard the body access like the H/O accesses below. The original
    // dereferenced options.Body unconditionally, panicking on a nil options
    // even though every other use of options here tolerates nil.
    if options != nil {
        requestInfo.SetContentFromParsable(m.requestAdapter, "application/json", options.Body)
    }
    if options != nil && options.H != nil {
        requestInfo.Headers = options.H
    }
    if options != nil && len(options.O) != 0 {
        err := requestInfo.AddRequestOptions(options.O...)
        if err != nil {
            return nil, err
        }
    }
    return requestInfo, nil
}
// Delete delete navigation property joinedTeams for users
func (m *TeamItemRequestBuilder) Delete(options *TeamItemRequestBuilderDeleteOptions)(error) {
    // Build the DELETE request from the supplied options (headers, request options).
    requestInfo, err := m.CreateDeleteRequestInformation(options);
    if err != nil {
        return err
    }
    // Map 4XX/5XX status codes to typed OData errors on deserialization.
    errorMapping := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ErrorMappings {
        "4XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
        "5XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
    }
    // A successful delete returns no content.
    err = m.requestAdapter.SendNoContentAsync(requestInfo, nil, errorMapping)
    if err != nil {
        return err
    }
    return nil
}
// Get the Microsoft Teams teams that the user is a member of. Read-only. Nullable.
func (m *TeamItemRequestBuilder) Get(options *TeamItemRequestBuilderGetOptions)(i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.Teamable, error) {
requestInfo, err := m.CreateGetRequestInformation(options);
if err != nil {
return nil, err
}
errorMapping := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ErrorMappings {
"4XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
"5XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
}
res, err := m.requestAdapter.SendAsync(requestInfo, i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.CreateTeamFromDiscriminatorValue, nil, errorMapping)
if err != nil {
return nil, err
}
return res.(i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.Teamable), nil
}
func (m *TeamItemRequestBuilder) Group()(*ic44d4f0e5acd07f5481d8f8b84a79cab11dfbee059cf6770fd4e2b23db0c1a93.GroupRequestBuilder) {
return ic44d4f0e5acd07f5481d8f8b84a79cab11dfbee059cf6770fd4e2b23db0c1a93.NewGroupRequestBuilderInternal(m.pathParameters, m.requestAdapter);
}
func (m *TeamItemRequestBuilder) InstalledApps()(*ic914f26c893a65c2d7a0c81250eacc4c0f9d860f64ab1b1cf1c364c6743699a8.InstalledAppsRequestBuilder) {
return ic914f26c893a65c2d7a0c81250eacc4c0f9d860f64ab1b1cf1c364c6743699a8.NewInstalledAppsRequestBuilderInternal(m.pathParameters, m.requestAdapter);
}
// InstalledAppsById gets an item from the github.com/microsoftgraph/msgraph-sdk-go/.users.item.joinedTeams.item.installedApps.item collection
func (m *TeamItemRequestBuilder) InstalledAppsById(id string)(*ie5af489748d2fc36f15958b988ad08901566118f084112092561d1d1b446b768.TeamsAppInstallationItemRequestBuilder) {
urlTplParams := make(map[string]string)
for idx, item := range m.pathParameters {
urlTplParams[idx] = item
}
if id != "" {
urlTplParams["teamsAppInstallation_id"] = id
}
return ie5af489748d2fc36f15958b988ad08901566118f084112092561d1d1b446b768.NewTeamsAppInstallationItemRequestBuilderInternal(urlTplParams, m.requestAdapter);
}
func (m *TeamItemRequestBuilder) Members()(*ief730b7100e12b3385f77770a9d755bebd575023d52fdbb9414e8c86230b027d.MembersRequestBuilder) {
return ief730b7100e12b3385f77770a9d755bebd575023d52fdbb9414e8c86230b027d.NewMembersRequestBuilderInternal(m.pathParameters, m.requestAdapter);
}
// MembersById gets an item from the github.com/microsoftgraph/msgraph-sdk-go/.users.item.joinedTeams.item.members.item collection
func (m *TeamItemRequestBuilder) MembersById(id string)(*i98b219e6f0357bddb879690365c172b8f5fa9226021c11592a8062b6d8a9415e.ConversationMemberItemRequestBuilder) {
urlTplParams := make(map[string]string)
for idx, item := range m.pathParameters {
urlTplParams[idx] = item
}
if id != "" {
urlTplParams["conversationMember_id"] = id
}
return i98b219e6f0357bddb879690365c172b8f5fa9226021c11592a8062b6d8a9415e.NewConversationMemberItemRequestBuilderInternal(urlTplParams, m.requestAdapter);
}
func (m *TeamItemRequestBuilder) Operations()(*i5363721350f81d4226008b2a0abc5fa76fabff34763dc7de5066c621e0c0f6be.OperationsRequestBuilder) {
return i5363721350f81d4226008b2a0abc5fa76fabff34763dc7de5066c621e0c0f6be.NewOperationsRequestBuilderInternal(m.pathParameters, m.requestAdapter);
}
// OperationsById gets an item from the github.com/microsoftgraph/msgraph-sdk-go/.users.item.joinedTeams.item.operations.item collection
func (m *TeamItemRequestBuilder) OperationsById(id string)(*ie78978d46a3704a561b7ecbd9e66a5ee9be50b99960e26229ec758c80ee98271.TeamsAsyncOperationItemRequestBuilder) {
urlTplParams := make(map[string]string)
for idx, item := range m.pathParameters {
urlTplParams[idx] = item
}
if id != "" {
urlTplParams["teamsAsyncOperation_id"] = id
}
return ie78978d46a3704a561b7ecbd9e66a5ee9be50b99960e26229ec758c80ee98271.NewTeamsAsyncOperationItemRequestBuilderInternal(urlTplParams, m.requestAdapter);
}
// Patch update the navigation property joinedTeams in users
func (m *TeamItemRequestBuilder) Patch(options *TeamItemRequestBuilderPatchOptions)(error) {
requestInfo, err := m.CreatePatchRequestInformation(options);
if err != nil {
return err
}
errorMapping := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ErrorMappings {
"4XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
"5XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
}
err = m.requestAdapter.SendNoContentAsync(requestInfo, nil, errorMapping)
if err != nil {
return err
}
return nil
}
func (m *TeamItemRequestBuilder) PrimaryChannel()(*i6ea3db8c980a5b6b549dcba6aaa6b387b8c6b1cb073e6178a10627e6fb06a389.PrimaryChannelRequestBuilder) {
return i6ea3db8c980a5b6b549dcba6aaa6b387b8c6b1cb073e6178a10627e6fb06a389.NewPrimaryChannelRequestBuilderInternal(m.pathParameters, m.requestAdapter);
}
func (m *TeamItemRequestBuilder) Schedule()(*i81bd7dd61df3d95dbc4620ad1479451afe1469239e95f230e4f09a46b785da75.ScheduleRequestBuilder) {
return i81bd7dd61df3d95dbc4620ad1479451afe1469239e95f230e4f09a46b785da75.NewScheduleRequestBuilderInternal(m.pathParameters, m.requestAdapter);
}
func (m *TeamItemRequestBuilder) Template()(*i539671039e993ec9795209a4951310067d75618d6c4524b564660e6d7f6dec35.TemplateRequestBuilder) {
return i539671039e993ec9795209a4951310067d75618d6c4524b564660e6d7f6dec35.NewTemplateRequestBuilderInternal(m.pathParameters, m.requestAdapter);
} | // Request options
O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption; |
461. Hamming Distance.go | package leetcode
// hammingDistance returns the Hamming distance between x and y: the number
// of bit positions in which the two integers differ.
func hammingDistance(x int, y int) int {
	count := 0
	// Brian Kernighan's trick: xor &= xor - 1 clears the lowest set bit,
	// so the loop iterates exactly once per differing bit.
	for xor := x ^ y; xor != 0; xor &= xor - 1 {
		count++
	}
	return count
}
mapper_base.py | import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from .base import Base
class MapperBase():
    """Builds a SQLAlchemy engine and session factory for one database.

    Connection settings are read from the MYSQL_* environment variables;
    passing the special database name 'test' switches to an in-memory
    SQLite database instead.
    """

    # Connection settings, read once at class-definition time.
    user = os.getenv("MYSQL_USER")
    key = os.getenv("MYSQL_KEY")
    host = os.getenv("MYSQL_HOST")
    port = os.getenv("MYSQL_PORT")

    def __init__(self, database):
        self.db = database
        if database == 'test':
            # Tests run against an ephemeral in-memory SQLite database.
            self.url = 'sqlite:///:memory:'
        else:
            self.url = (
                'mysql+mysqlconnector://{}:{}@{}:{}/{}'.format(
                    self.user,
                    self.key,
                    self.host,
                    self.port,
                    self.db,
                )
            )
        # use_pure is forwarded to the mysql-connector driver via SQLAlchemy.
        self.engine = create_engine(
            self.url,
            connect_args={'use_pure': True}
        )
        self.session = sessionmaker(bind=self.engine)
        self.base = Base

    def get_base(self):
        """Return the shared declarative base class."""
        return self.base

    def get_engine(self):
        """Return the underlying SQLAlchemy engine."""
        return self.engine

    def get_session(self):
        """Create and return a new session bound to the engine."""
        return self.session()
Log.tsx | import React from "react"
import { appLogs } from "@shared/ipc"
import Page from "~/components/Page"
import "twin.macro" |
// Renders the application log fetched over IPC; shows the serialized
// error instead if the fetch fails.
const Logs = () => {
    const [log, setLog] = React.useState("")

    // Fetch once on mount.
    React.useEffect(() => {
        appLogs
            .invoke()
            .then(setLog)
            .catch(err => setLog(JSON.stringify(err, null, 2)))
    }, [])

    return (
        <Page>
            <p tw="p-3 whitespace-pre-wrap">{log === "" ? "Empty" : log}</p>
        </Page>
    )
}
export default Logs
attachment_00.rs | #![cfg(all(test, feature = "test_e2e"))]
use azure_core::Context;
use azure_cosmos::prelude::*;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
mod setup;
// Now we create a sample struct. The Cow trick
// allows us to use the same struct for serializing
// (without having to own the items if not needed) and
// for deserializing (where owning is required).
// We do not need to define the "id" field here, it will be
// specified in the Document struct below.
#[derive(Serialize, Deserialize, Clone, Debug)]
struct MySampleStruct<'a> {
    // `id` is also used as the partition key (see the CosmosEntity impl),
    // matching the collection created with partition key "/id".
    id: Cow<'a, str>,
    a_string: Cow<'a, str>,
    a_number: u64,
    a_timestamp: i64,
}
// The collection's partition key is "/id", so the entity's partition key
// is simply a borrow of its own id field.
impl<'a> azure_cosmos::CosmosEntity<'a> for MySampleStruct<'a> {
    type Entity = &'a str;
    fn partition_key(&'a self) -> Self::Entity {
        self.id.as_ref()
    }
}
#[tokio::test]
async fn attachment() -> Result<(), azure_cosmos::Error> {
const DATABASE_NAME: &str = "test-cosmos-db-attachment";
const COLLECTION_NAME: &str = "test-collection-attachment";
let client = setup::initialize().unwrap();
// create a temp database
let _create_database_response = client
.create_database(
azure_core::Context::new(),
DATABASE_NAME,
CreateDatabaseOptions::new(),
)
.await
.unwrap();
let database_client = client.into_database_client(DATABASE_NAME);
// create a temp collection
let _create_collection_response = {
let indexes = collection::IncludedPathIndex {
kind: collection::KeyKind::Hash,
data_type: collection::DataType::String,
precision: Some(3),
};
let ip = collection::IncludedPath {
path: "/*".to_owned(),
indexes: Some(vec![indexes]),
};
let ip = collection::IndexingPolicy {
automatic: true,
indexing_mode: collection::IndexingMode::Consistent,
included_paths: vec![ip],
excluded_paths: vec![],
};
let options = CreateCollectionOptions::new("/id")
.offer(Offer::Throughput(400))
.indexing_policy(ip);
database_client
.create_collection(Context::new(), COLLECTION_NAME, options)
.await
.unwrap()
};
let collection_client = database_client
.clone()
.into_collection_client(COLLECTION_NAME);
let id = format!("unique_id{}", 100);
let doc = MySampleStruct {
id: Cow::Borrowed(&id),
a_string: Cow::Borrowed("Something here"),
a_number: 100,
a_timestamp: chrono::Utc::now().timestamp(),
};
// let's add an entity.
let session_token: ConsistencyLevel = collection_client
.create_document(Context::new(), &doc, CreateDocumentOptions::new())
.await?
.into();
let document_client = collection_client.into_document_client(id.clone(), &doc.id)?;
// list attachments, there must be none.
let ret = document_client
.list_attachments()
.consistency_level(session_token.clone())
.execute()
.await?;
assert_eq!(0, ret.attachments.len());
// create reference attachment
let attachment_client = document_client.clone().into_attachment_client("reference");
let resp = attachment_client
.create_reference()
.consistency_level(&ret)
.execute("https://www.bing.com", "image/jpeg")
.await?;
// replace reference attachment
let resp = attachment_client
.replace_reference()
.consistency_level(&resp)
.execute("https://www.microsoft.com", "image/jpeg")
.await?;
// create slug attachment | .content_type("text/plain")
.execute("something cool here")
.await?;
// list attachments, there must be two.
let ret = document_client
.list_attachments()
.consistency_level(&resp)
.execute()
.await?;
assert_eq!(2, ret.attachments.len());
// get reference attachment, it must have the updated media link
let reference_attachment = document_client
.clone()
.into_attachment_client("reference")
.get()
.consistency_level(&ret)
.execute()
.await?;
assert_eq!(
"https://www.microsoft.com",
reference_attachment.attachment.media
);
// get slug attachment, it must have the text/plain content type
println!("getting slug attachment");
let slug_attachment = document_client
.clone()
.into_attachment_client("slug")
.get()
.consistency_level(&reference_attachment)
.execute()
.await
.unwrap();
assert_eq!("text/plain", slug_attachment.attachment.content_type);
// delete slug attachment
let resp_delete = attachment_client
.delete()
.consistency_level(&slug_attachment)
.execute()
.await?;
// list attachments, there must be one.
let ret = document_client
.list_attachments()
.consistency_level(&resp_delete)
.execute()
.await?;
assert_eq!(1, ret.attachments.len());
// delete the database
database_client.delete_database().execute().await?;
Ok(())
} | let attachment_client = document_client.clone().into_attachment_client("slug");
let resp = attachment_client
.create_slug()
.consistency_level(&resp) |
serialization.rs | use std::error::Error;
use std::path::Path;
/// The address (byte offset) of a record within the serialized stream.
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub struct Addr(pub u32);

impl Addr {
    /// Widens the 32-bit address to `usize` for buffer indexing.
    pub fn as_usize(self) -> usize {
        self.0 as usize
    }
}

/// Destination that serialized records can be appended to.
pub trait SerializationSink: Sized {
    /// Creates a sink backed by the file at `path`.
    fn from_path(path: &Path) -> Result<Self, Box<dyn Error>>;

    /// Reserves `num_bytes` at the end of the stream, lets `write` fill the
    /// reserved slice, and returns the start address of the region. Takes
    /// `&self`, so implementations synchronize internally (e.g. the `Mutex`
    /// in `TestSink`).
    fn write_atomic<W>(&self, num_bytes: usize, write: W) -> Addr
    where
        W: FnOnce(&mut [u8]);
}

#[cfg(test)]
pub mod test {
    use super::*;
    use std::sync::Mutex;

    /// In-memory sink used by unit tests; accumulates all writes in a Vec.
    pub struct TestSink {
        data: Mutex<Vec<u8>>,
    }

    impl TestSink {
        /// Creates an empty sink.
        pub fn new() -> TestSink {
            TestSink {
                data: Mutex::new(Vec::new()),
            }
        }

        /// Consumes the sink and returns every byte written to it.
        pub fn into_bytes(self) -> Vec<u8> {
            self.data.into_inner().unwrap()
        }
    }

    impl SerializationSink for TestSink {
        fn from_path(_path: &Path) -> Result<Self, Box<dyn Error>> {
            unimplemented!()
        }

        fn write_atomic<W>(&self, num_bytes: usize, write: W) -> Addr
        where
            W: FnOnce(&mut [u8]),
        {
            let mut bytes = self.data.lock().unwrap();
            let offset = bytes.len();
            bytes.resize(offset + num_bytes, 0);
            write(&mut bytes[offset..]);
            Addr(offset as u32)
        }
    }

    impl std::fmt::Debug for TestSink {
        fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
            write!(f, "TestSink")
        }
    }
}
objectKeys.ts | /* eslint-disable @typescript-eslint/no-explicit-any */
/** https://docs.tsafe.dev/objectKeys */
export function objectKeys<T extends Record<string, unknown>>(o: T): (keyof T)[] {
return Object.keys(o) as any;
} | ||
oci_mysql_analytics_cluster_actions.py | #!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_mysql_analytics_cluster_actions
short_description: Perform actions on an AnalyticsCluster resource in Oracle Cloud Infrastructure
description:
- Perform actions on an AnalyticsCluster resource in Oracle Cloud Infrastructure
- "For I(action=add), dEPRECATED -- please use HeatWave API instead.
Adds an Analytics Cluster to the DB System."
- "For I(action=restart), dEPRECATED -- please use HeatWave API instead.
Restarts the Analytics Cluster."
- "For I(action=start), dEPRECATED -- please use HeatWave API instead.
Starts the Analytics Cluster."
- "For I(action=stop), dEPRECATED -- please use HeatWave API instead.
Stops the Analytics Cluster."
version_added: "2.9.0"
author: Oracle (@oracle)
options:
db_system_id:
description:
- The DB System L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm).
type: str
aliases: ["id"]
required: true
shape_name:
description:
- "The shape determines resources to allocate to the Analytics
Cluster nodes - CPU cores, memory."
- Required for I(action=add).
type: str
cluster_size:
description:
- The number of analytics-processing nodes provisioned for the
Analytics Cluster.
- Required for I(action=add).
type: int
action:
description:
- The action to perform on the AnalyticsCluster.
type: str
required: true
choices:
- "add"
- "restart"
- "start"
- "stop"
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Perform action add on analytics_cluster
oci_mysql_analytics_cluster_actions:
# required
db_system_id: "ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx"
shape_name: shape_name_example
cluster_size: 56
action: add
- name: Perform action restart on analytics_cluster
oci_mysql_analytics_cluster_actions:
# required
db_system_id: "ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx"
action: restart
- name: Perform action start on analytics_cluster
oci_mysql_analytics_cluster_actions:
# required
db_system_id: "ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx"
action: start
- name: Perform action stop on analytics_cluster
oci_mysql_analytics_cluster_actions:
# required
db_system_id: "ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx"
action: stop
"""
RETURN = """
analytics_cluster:
description:
- Details of the AnalyticsCluster resource acted upon by the current operation
returned: on success
type: complex
contains:
db_system_id:
description:
- The OCID of the parent DB System this Analytics Cluster is attached to.
returned: on success
type: str
sample: "ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx"
shape_name:
description:
- "The shape determines resources to allocate to the Analytics
Cluster nodes - CPU cores, memory."
returned: on success
type: str
sample: shape_name_example
cluster_size:
description:
- The number of analytics-processing compute instances, of the
specified shape, in the Analytics Cluster.
returned: on success
type: int
sample: 56
cluster_nodes:
description:
- An Analytics Cluster Node is a compute host that is part of an Analytics Cluster.
returned: on success
type: complex
contains:
node_id:
description:
- The ID of the node within MySQL Analytics Cluster.
returned: on success
type: str
sample: "ocid1.node.oc1..xxxxxxEXAMPLExxxxxx"
lifecycle_state:
description:
- The current state of the MySQL Analytics Cluster node.
returned: on success
type: str
sample: CREATING
time_created:
description:
- The date and time the MySQL Analytics Cluster node was created, as described by L(RFC 3339,https://tools.ietf.org/rfc/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The date and time the MySQL Analytics Cluster node was updated, as described by L(RFC 3339,https://tools.ietf.org/rfc/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
lifecycle_state:
description:
- The current state of the Analytics Cluster.
returned: on success
type: str
sample: CREATING
lifecycle_details:
description:
- Additional information about the current lifecycleState.
returned: on success
type: str
sample: lifecycle_details_example
time_created:
description:
- The date and time the Analytics Cluster was created, as described by L(RFC 3339,https://tools.ietf.org/rfc/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_updated:
description:
- The time the Analytics Cluster was last updated, as described by L(RFC 3339,https://tools.ietf.org/rfc/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
sample: {
"db_system_id": "ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx",
"shape_name": "shape_name_example",
"cluster_size": 56,
"cluster_nodes": [{
"node_id": "ocid1.node.oc1..xxxxxxEXAMPLExxxxxx",
"lifecycle_state": "CREATING",
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00"
}],
"lifecycle_state": "CREATING",
"lifecycle_details": "lifecycle_details_example",
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
oci_config_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIActionsHelperBase,
get_custom_class,
)
try:
from oci.mysql import WorkRequestsClient
from oci.mysql import DbSystemClient
from oci.mysql.models import AddAnalyticsClusterDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class MysqlAnalyticsClusterActionsHelperGen(OCIActionsHelperBase):
"""
Supported actions:
add
restart
start
stop
"""
def get_waiter_client(self):
return oci_config_utils.create_service_client(self.module, WorkRequestsClient)
@staticmethod
def get_module_resource_id_param():
return "db_system_id"
def get_module_resource_id(self):
return self.module.params.get("db_system_id")
def get_get_fn(self):
return self.client.get_analytics_cluster
def get_resource(self):
return oci_common_utils.call_with_backoff(
self.client.get_analytics_cluster,
db_system_id=self.module.params.get("db_system_id"),
)
    def add(self):
        """Perform the 'add' action: create an Analytics Cluster on the DB
        System and wait for the spawned work request to complete."""
        # Convert the raw module params into the SDK's request model.
        action_details = oci_common_utils.convert_input_data_to_model_class(
            self.module.params, AddAnalyticsClusterDetails
        )
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.add_analytics_cluster,
            call_fn_args=(),
            call_fn_kwargs=dict(
                db_system_id=self.module.params.get("db_system_id"),
                add_analytics_cluster_details=action_details,
            ),
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            # Waiter operation id: "<ACTION>_" + ACTION_OPERATION_KEY.
            operation="{0}_{1}".format(
                self.module.params.get("action").upper(),
                oci_common_utils.ACTION_OPERATION_KEY,
            ),
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )
def restart(self):
return oci_wait_utils.call_and_wait(
call_fn=self.client.restart_analytics_cluster,
call_fn_args=(),
call_fn_kwargs=dict(db_system_id=self.module.params.get("db_system_id"),),
waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
operation="{0}_{1}".format(
self.module.params.get("action").upper(),
oci_common_utils.ACTION_OPERATION_KEY,
),
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=oci_common_utils.get_work_request_completed_states(),
)
def start(self):
return oci_wait_utils.call_and_wait(
call_fn=self.client.start_analytics_cluster,
call_fn_args=(),
call_fn_kwargs=dict(db_system_id=self.module.params.get("db_system_id"),),
waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
operation="{0}_{1}".format(
self.module.params.get("action").upper(),
oci_common_utils.ACTION_OPERATION_KEY,
),
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=oci_common_utils.get_work_request_completed_states(),
)
def stop(self):
|
MysqlAnalyticsClusterActionsHelperCustom = get_custom_class(
"MysqlAnalyticsClusterActionsHelperCustom"
)
class ResourceHelper(
MysqlAnalyticsClusterActionsHelperCustom, MysqlAnalyticsClusterActionsHelperGen
):
pass
def main():
    """Ansible entry point: build the argument spec, validate prerequisites,
    and dispatch the requested action to the resource helper."""
    # Base spec shared by all OCI modules (auth, wait options, ...).
    module_args = oci_common_utils.get_common_arg_spec(
        supports_create=False, supports_wait=True
    )
    # Module-specific parameters.
    module_args.update(
        dict(
            db_system_id=dict(aliases=["id"], type="str", required=True),
            shape_name=dict(type="str"),
            cluster_size=dict(type="int"),
            action=dict(
                type="str", required=True, choices=["add", "restart", "start", "stop"]
            ),
        )
    )
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    resource_helper = ResourceHelper(
        module=module,
        resource_type="analytics_cluster",
        service_client_class=DbSystemClient,
        namespace="mysql",
    )
    # perform_action maps the action string onto the matching helper method.
    result = resource_helper.perform_action(module.params.get("action"))
    module.exit_json(**result)
| return oci_wait_utils.call_and_wait(
call_fn=self.client.stop_analytics_cluster,
call_fn_args=(),
call_fn_kwargs=dict(db_system_id=self.module.params.get("db_system_id"),),
waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
operation="{0}_{1}".format(
self.module.params.get("action").upper(),
oci_common_utils.ACTION_OPERATION_KEY,
),
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=oci_common_utils.get_work_request_completed_states(),
) |
lvcache.py | #
# Last value cache
# Uses XPUB subscription messages to re-send data
#
import zmq
def main():
    """Last-value-cache proxy: forwards every topic update from the
    publisher to subscribers, and replays the last cached value of a
    topic to each newly arriving subscriber."""
    ctx = zmq.Context.instance()
    frontend = ctx.socket(zmq.SUB)
    frontend.connect("tcp://*:5557")
    backend = ctx.socket(zmq.XPUB)
    backend.bind("tcp://*:5558")

    # Subscribe to every single topic from publisher
    frontend.setsockopt(zmq.SUBSCRIBE, b"")

    # Store last instance of each topic in a cache
    cache = {}

    # main poll loop
    # We route topic updates from frontend to backend, and
    # we handle subscriptions by sending whatever we cached,
    # if anything:
    poller = zmq.Poller()
    poller.register(frontend, zmq.POLLIN)
    poller.register(backend, zmq.POLLIN)
    while True:
        try:
            events = dict(poller.poll(1000))
        except KeyboardInterrupt:
            print("interrupted")
            break

        # Any new topic data we cache and then forward
        if frontend in events:
            msg = frontend.recv_multipart()
            topic, current = msg
            cache[topic] = current
            backend.send_multipart(msg)

        # handle subscriptions
        # When we get a new subscription we pull data from the cache:
        if backend in events:
            event = backend.recv()
            # Event is one byte 0=unsub or 1=sub, followed by topic.
            # Fix: compare a one-byte slice rather than event[0] -- in
            # Python 3 indexing a bytes object yields an int, so the
            # original `event[0] == b'\x01'` was always False and cached
            # topics were never replayed to new subscribers. The slice
            # form also behaves identically on Python 2.
            if event[0:1] == b'\x01':
                topic = event[1:]
                if topic in cache:
                    print ("Sending cached topic %s" % topic)
                    backend.send_multipart([ topic, cache[topic] ])


if __name__ == '__main__':
    main()
peak.js | var socket = io.connect();
socket.on('refresh',
function () {
location.reload();
console.log('test'); | }
); |
|
memorymetrics.go | //
// SPDX-License-Identifier: BSD-3-Clause
//
package redfish
import (
"encoding/json"
"github.com/stmcginnis/gofish/common"
)
// AlarmTrips shall contain properties describing the types of alarms that have
// been raised by the memory. These alarms shall be reset when the system
// resets. Note that if they are re-discovered they can be reasserted.
type AlarmTrips struct {
// AddressParityError shall be true if an Address Parity Error was detected
// which could not be corrected by retry.
AddressParityError bool
// CorrectableECCError shall be true if the correctable error threshold
// crossing alarm trip was detected.
CorrectableECCError bool
// SpareBlock shall be true if the spare block capacity crossing alarm trip
// was detected.
SpareBlock bool
// Temperature shall be true if a temperature threshold alarm trip was detected.
Temperature bool
// UncorrectableECCError shall be true if the uncorrectable error threshold
// alarm trip was detected.
UncorrectableECCError bool
}
// CurrentPeriod shall describe the metrics of the memory since last time the
// ClearCurrentPeriod Action was performed or the system reset.
type CurrentPeriod struct {
// BlocksRead shall be number of blocks read since reset.
BlocksRead uint
// BlocksWritten shall be number of blocks written since reset.
BlocksWritten uint
}
// HealthData shall contain properties which describe the HealthData metrics for
// the current resource.
type HealthData struct {
// AlarmTrips shall contain properties describe the types of alarms that
// have been raised by the memory.
AlarmTrips AlarmTrips
// DataLossDetected shall be data loss detection status, with true
// indicating data loss detected.
DataLossDetected bool
// LastShutdownSuccess shall be the status of the last shutdown, with true
// indicating success.
LastShutdownSuccess bool
// PerformanceDegraded shall be performance degraded mode status, with true
// indicating performance degraded.
PerformanceDegraded bool
// PredictedMediaLifeLeftPercent shall contain an indicator
// of the percentage of life remaining in the media.
PredictedMediaLifeLeftPercent float32
// RemainingSpareBlockPercentage shall be the remaining spare blocks in percentage.
RemainingSpareBlockPercentage float32
}
// LifeTime shall describe the metrics of the memory since manufacturing.
type LifeTime struct {
// BlocksRead shall be number of blocks read for the lifetime of the Memory.
BlocksRead uint64
// BlocksWritten shall be number of blocks written for the lifetime of the Memory.
BlocksWritten uint64
}
// MemoryMetrics is used to represent the Memory Metrics for a single Memory
// device in a Redfish implementation.
type MemoryMetrics struct {
common.Entity
// ODataContext is the odata context.
ODataContext string `json:"@odata.context"`
// ODataType is the odata type.
ODataType string `json:"@odata.type"`
// BandwidthPercent shall contain memory bandwidth utilization as a
// percentage. When this resource is subordinate to the MemorySummary
// object, this property shall be the memory bandwidth utilization over all
// memory as a percentage.
BandwidthPercent float32
// BlockSizeBytes shall be the block size in bytes of all structure elements.
BlockSizeBytes int
// CurrentPeriod shall contain properties which describe the CurrentPeriod
// metrics for the current resource.
CurrentPeriod CurrentPeriod
// Description provides a description of this resource.
Description string
// HealthData shall contain properties which describe the HealthData metrics
// for the current resource.
HealthData HealthData
// LifeTime shall contain properties which describe the LifeTime metrics for
// the current resource.
LifeTime LifeTime
// OperatingSpeedMHz is used by the memory device.
OperatingSpeedMHz int
}
// GetMemoryMetrics will get a MemoryMetrics instance from the service.
func GetMemoryMetrics(c common.Client, uri string) (*MemoryMetrics, error) |
// ListReferencedMemoryMetricss gets the collection of MemoryMetrics from
// a provided reference.
// ListReferencedMemoryMetricss gets the collection of MemoryMetrics from
// a provided reference. Individual fetch failures are accumulated and
// returned as a collection error alongside the successfully fetched items.
func ListReferencedMemoryMetricss(c common.Client, link string) ([]*MemoryMetrics, error) { //nolint:dupl
	var result []*MemoryMetrics
	if link == "" {
		return result, nil
	}

	links, err := common.GetCollection(c, link)
	if err != nil {
		return result, err
	}

	collectionError := common.NewCollectionError()
	for _, uri := range links.ItemLinks {
		item, err := GetMemoryMetrics(c, uri)
		if err != nil {
			collectionError.Failures[uri] = err
			continue
		}
		result = append(result, item)
	}

	if collectionError.Empty() {
		return result, nil
	}
	return result, collectionError
}
| {
resp, err := c.Get(uri)
if err != nil {
return nil, err
}
defer resp.Body.Close()
var memorymetrics MemoryMetrics
err = json.NewDecoder(resp.Body).Decode(&memorymetrics)
if err != nil {
return nil, err
}
memorymetrics.SetClient(c)
return &memorymetrics, nil
} |
ttl_map.go | package util
import (
"sync"
"time"
)
// item is a single TTLMap entry: the stored value together with the Unix
// timestamp (seconds) at which it was created or last refreshed by Put.
type item struct {
	value     string
	createdAt int64
}
// TTLMap is a mutex-guarded map of string keys to string values whose
// entries expire ttl seconds after their last Put. Expired entries are
// evicted lazily, when Get observes them.
type TTLMap struct {
	ttl int64            // time-to-live in seconds, compared against Unix timestamps
	m   map[string]*item // keyed entries; guarded by l
	l   sync.Mutex       // protects m and the items it points to
}
func NewTTLMap(ttl int64) (m *TTLMap) |
// Len returns the number of entries currently stored in the map. Note this
// includes entries whose TTL has elapsed but which have not yet been
// evicted by Get.
//
// Fix: the map is now read under m.l — previously Len accessed m.m without
// the lock while Put/Get mutate it under the lock, which is a data race.
func (m *TTLMap) Len() int {
	m.l.Lock()
	defer m.l.Unlock()
	return len(m.m)
}
// Put stores v under k and (re)starts the entry's TTL clock.
//
// Fix: when the key already existed, the old value was kept and only the
// timestamp refreshed — Put("k","a"); Put("k","b") left "a" in the map.
// The value is now always updated.
func (m *TTLMap) Put(k, v string) {
	m.l.Lock()
	defer m.l.Unlock()
	it, ok := m.m[k]
	if !ok {
		it = &item{}
		m.m[k] = it
	}
	it.value = v
	it.createdAt = time.Now().Unix()
}
// Get returns the live value stored under k, or the empty string when the
// key is absent or its entry has outlived the map's TTL. An expired entry
// is deleted as a side effect of the lookup.
func (m *TTLMap) Get(k string) (v string) {
	m.l.Lock()
	defer m.l.Unlock()

	it, ok := m.m[k]
	if !ok {
		return
	}
	if time.Now().Unix()-it.createdAt > m.ttl {
		// lazy eviction: the entry is stale, drop it and report a miss
		delete(m.m, k)
		return
	}
	return it.value
}
| {
m = &TTLMap{ttl: ttl, m: make(map[string]*item)}
return
} |
route-test.ts | import Service from '@ember/service';
import { setupEngineTest } from 'ember-osf-web/tests/helpers/engines';
import { TestContext } from 'ember-test-helpers';
import { module, test } from 'qunit';
const themeStub = Service.extend();
const headTagsStub = Service.extend();
module('Unit | Route | collections/provider/discover', hooks => {
setupEngineTest(hooks, 'collections');
hooks.beforeEach(function(this: TestContext) {
this.owner.register('service:theme', themeStub);
this.owner.register('service:head-tags', headTagsStub); |
test('it exists', function(assert) {
const route = this.owner.lookup('route:provider/discover');
assert.ok(route);
});
}); | }); |
utils.py | from django.template import Template, Context
from django.utils.deprecation import MiddlewareMixin
from django.conf import settings
from django.db.models import Q
from django.urls import reverse, NoReverseMatch
from django.core.exceptions import ObjectDoesNotExist
from .signals import page_found, page_not_found, page_requested
from .exceptions import InvalidPathException, PageNotFoundException
from .models import Page
import re
def normalize_path(path):
    ''' Remove duplicated slashes and reverse mode with/wo slash in the end '''
    from .urls import get_deeppages_path
    # collapse runs of slashes: '//a///b' -> '/a/b'
    new_path = re.sub(r'[\/]{2,}', '/', path)
    try:
        # check if deeppages' path isn't the root path
        deeppages_path = reverse('deeppages:{}'.format(get_deeppages_path()))
    except NoReverseMatch:
        pass
    else:
        if deeppages_path != '/':
            # strip the app's URL mount prefix so that page lookups use
            # app-relative paths; restore the leading slash lost by replace()
            if new_path.startswith(deeppages_path):
                new_path = new_path.replace(deeppages_path, '')
            if not new_path.startswith('/'):
                new_path = '/{}'.format(new_path)
    # NOTE(review): this deliberately *toggles* the trailing slash (strips
    # it when present, appends it otherwise). get_page_by_path queries both
    # this normalized path and the raw request.path, which suggests the
    # toggle covers the "other" spelling — confirm that is the intent.
    return new_path[:-1] if new_path.endswith('/') else '{}/'.format(new_path)
def render_content(content, context):
    '''Render ``content`` as a Django template.

    ``context`` may be a dict or None; None is treated as an empty context.
    '''
    template = Template(content)
    return template.render(Context(context or {}))
def render_page(page, context, callback):
    '''Render a page, optionally preprocessing its content.

    When ``callback`` is given it is invoked as ``callback(page, context)``
    and its return value is rendered instead of ``page.content``.
    '''
    content = callback(page, context) if callback else page.content
    return render_content(content, context)
def render_requested_page_content(sender, request, page):
    ''' Render page requested by Middleware or PageView '''
    content = page.content
    ctx = {'request': request}
    # Announce the page hit so receivers can react before rendering.
    page_found.send_robust(
        sender=sender.__class__,
        page=page,
        path=page.path,
        request=request,
        content=content,
        context=ctx)
    # So, if content and/or context was changed inside the signal receiver,
    # we'll render with the new values.
    # NOTE(review): receivers *can* mutate ``ctx`` (a dict, passed by
    # reference), but rebinding the ``content`` keyword inside a receiver
    # cannot change the local string here — strings are immutable and
    # passed by value. Only the context changes actually reach the render
    # below; confirm this matches the documented signal contract.
    return render_content(content, ctx)
def is_acceptable_file_type(path):
    ''' Only text-based content can be accepted, any other will be ignored.

    A path is accepted when its last segment either has no extension at all
    (treated as a page URL) or carries one of the known text extensions.
    '''
    filename = path.strip('/').split('/')[-1]
    accepted_exts = ['.html', '.htm', '.css', '.js', '.svg', '.txt']
    max_ext_len = max(len(ext) for ext in accepted_exts)
    # A dot counts as starting an extension only if it sits within the
    # last max_ext_len characters of the filename.
    dot_pos = filename.find('.')
    has_extension = dot_pos != -1 and dot_pos >= (len(filename) - max_ext_len)
    return (not has_extension
            or any(filename.endswith(ext) for ext in accepted_exts))
def get_page_by_path(sender, request, logger):
    ''' Get page by path and return a rendered and processed template.

    Arguments:
    sender -- object sender
    request -- WSGIRequest object
    logger -- logger instance

    Also, three robust signals can be dispatched from here:

    1. page_requested (after a page request, ha!)
    2. page_not_found (for non-existent pages! O'really?)
    3. and, mainly, page_found (When a page exists AND is active! Ha!
       Could you imagine that?)

    Both signals: 'page_request' and 'page_not_found' these keyword
    arguments will be received: 'path' and 'request'.

    For 'page_found':

    - path: the path (URL) requested
    - page: a deeppages.models.Page() model's instance that was found
      by its PATH
    - request: WSGIRequest object
    - context: a context dictionary (with request inside)
    - content: the page content (you can change it as you wish)

    In case of 'page_not_found', after robust signal callback has been
    returned, Django's will follow its normal flow.

    ps.: if settings.DEBUG is True, you can handle some logs for debug
    purposes.
    '''
    path = normalize_path(request.path)
    # Non-text assets (images, fonts, ...) are never served by deeppages.
    if not is_acceptable_file_type(path):
        return
    if settings.DEBUG and logger:
        logger.debug('DeepPage Path Requested: [{}]'.format(path))
    # dispatch page requested signal
    page_requested.send_robust(
        sender=sender.__class__, path=path, request=request)
    if not path:
        # Is called from an instance subclass of TemplateView ?
        # Middleware callers fall through to Django's normal resolution;
        # view callers get an explicit exception instead.
        if issubclass(sender.__class__, MiddlewareMixin):
            return
        else:
            raise InvalidPathException
    try:
        # try to get page directly — match both the normalized path and the
        # raw request path, case-insensitively, against active pages only
        page = Page.objects.exclude(is_active=False).get(
            Q(path__iexact=path) | Q(path__iexact=request.path))
    except Page.DoesNotExist:
        if settings.DEBUG and logger:
            logger.exception('DeepPage Not Found: [{}]'.format(path))
        page_not_found.send_robust(
            sender=sender.__class__,
            path=path,
            request=request)
        if issubclass(sender.__class__, MiddlewareMixin):
            return
        else:
            raise PageNotFoundException
    else:
        return render_requested_page_content(sender, request, page)
def get_page_by_name(name, context=None, callback=None):
    ''' Get page by its name and render it.

    Arguments:
    name -- Page name

    Keyword arguments:
    context -- dictionary with additional key/values used for page
    content rendering (default: None)
    callback -- callback function invoked before rendering the page
    content (default: None)
    '''
    if not name:
        return None
    try:
        page = Page.objects.exclude(is_active=False).get(name__iexact=name)
    except ObjectDoesNotExist:
        return None
    return render_page(page, context, callback)
def get_page_by_slug(slug, context=None, callback=None):
| ''' Get page by its slug and render it.
Arguments:
slug -- Page's slug
Keyword arguments:
context -- dictionary with additional key/values that
will be used for page content rendering (default: None)
callback -- callback function - will be called before render the
page content (default: None)
'''
if not slug:
return
try:
page = Page.objects.exclude(is_active=False).get(slug__iexact=slug)
except ObjectDoesNotExist:
return
else:
return render_page(page, context, callback) |
|
log.go | /*
* Copyright (c) 2018 TFG Co <[email protected]>
* Author: TFG Co <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package jaeger
import (
"fmt"
"github.com/opentracing/opentracing-go"
"github.com/opentracing/opentracing-go/log"
)
// LogError records message on span as a structured OpenTracing error
// event and sets the span's "error" tag so tracing UIs flag the span
// as failed.
func LogError(span opentracing.Span, message string) {
	fields := []log.Field{
		log.String("event", "error"),
		log.String("message", message),
	}
	span.SetTag("error", true)
	span.LogFields(fields...)
}
// LogPanic logs a panic to a Jaeger span
func | (span opentracing.Span) {
if err := recover(); err != nil {
message := fmt.Sprint(err)
LogError(span, message)
panic(err)
}
}
| LogPanic |
TagUtils.ts | import uuid from '../uuid';
export default class | {
/**
* 获取 tag key 的中文名
*
* @export
* @param {string} key
* @param {IInputList[]} labelInfoSet
* @returns
*/
public static getTagKeyName(key: string, labelInfoSet: IInputList[]) {
if (!labelInfoSet) {
return;
}
return labelInfoSet.find((v) => v.value === key)?.key ?? '';
}
  // Resolve the display name of a single tag value.
  // Returns undefined when labelInfoSet is missing or no (key, value) match
  // is found; returns '' (and logs an error) when the key exists but has no
  // subSelected options configured.
  public static getTagName([key = '', value = ''], labelInfoSet: IInputList[]) {
    if (!labelInfoSet) {
      return;
    }
    for (const i of labelInfoSet) {
      if (i.value === key) {
        if (!i.subSelected) {
          console.error('标签解析错误', key, value);
          return '';
        }
        for (const j of i.subSelected) {
          if (j.value === value) {
            return j.key;
          }
        }
      }
    }
  }
/**
* 获取标签结果中的标签名
*
* @export
* @param {Object} result
* @param {IInputList[]} labelInfoSet
* @returns
*/
public static getTagNameList(result: Object, labelInfoSet: IInputList[]) {
// 获取当前的标签结果的所有结果
if (Object.keys(result).length <= 0) {
return [];
}
return Object.entries(result)
.reduce((acc: any[], cur: any) => {
const [key, value] = cur;
if (value && value.length > 0) {
const valueList = value.split(';');
const nameList = {
keyName: this.getTagKeyName(key, labelInfoSet),
value: valueList.map((v: string) => this.getTagName([key, v], labelInfoSet)),
};
return [...acc, nameList];
}
return acc;
}, [])
.filter((v: any) => v);
}
/**
* 没有配置 获取标签结果中的标签名
* @param result
* @returns
*/
public static getTagnameListWithoutConfig(result: Object) {
if (Object.keys(result).length <= 0) {
return [];
}
return Object.entries(result)
.reduce((acc: any[], cur: any): any[] => {
const [key, value] = cur;
const valueList = value.split(';');
const nameList = {
keyName: key,
value: valueList,
};
return [...acc, nameList];
}, [])
.filter((v: any) => v);
}
/**
* 判断当前的 key value 是否在 inputList 里面
* @param key
* @param value
* @param inputList
* @returns
*/
static judgeResultIsInInputList(key: string, value: string, inputList: IInputList[]) {
if (!key || !value || !inputList) {
return false;
}
const a = inputList.filter((v) => {
if (v.value === key && v.subSelected) {
const resultValue = value?.split(';');
return v?.subSelected.filter((i) => resultValue.indexOf(i.value) > -1).length > 0;
}
return false;
});
return a.length > 0;
}
  /**
   * Walk the top-level configuration and collect every sub-option flagged
   * `isDefault`, producing the initial result object.
   *
   * Defaults for the same key accumulate into a ';'-joined string, the
   * same storage format that getTagNameList later splits apart.
   * @param inputList tag configuration
   * @returns map of key -> ';'-joined default values
   */
  public static getDefaultResultByConfig(inputList: IInputList[]) {
    return inputList.reduce((acc: { [a: string]: string }, cur: IInputList) => {
      if (cur.subSelected) {
        cur.subSelected.forEach((data) => {
          if (data.isDefault) {
            const originResult = acc[cur.value] ?? '';
            let originResultList: string[] = [];
            if (originResult.length > 0) {
              // this key already has defaults — keep them and append
              originResultList = originResult.split(';');
            }
            originResultList.push(data.value);
            acc[cur.value] = originResultList.join(';');
          }
        });
      }
      return acc;
    }, {});
  }
/**
* 获取当前的默认的结果
* @param inputList
* @param basicResultList
* @returns
*/
public static getDefaultTagResult(inputList: IInputList[], basicResultList: any[]) {
const defaultResult: any = this.getDefaultResultByConfig(inputList);
if (basicResultList.length > 0) {
return basicResultList.map((v) => ({
id: uuid(),
sourceID: v.id,
result: { ...defaultResult },
}));
}
return [
{
id: uuid(),
sourceID: '',
result: { ...defaultResult },
},
];
}
}
| TagUtil |
each-module.js | 'use strict';
const glob = require('glob');
const getRelativePath = require('path').relative;
const resolvePath = require('path').resolve;
module.exports = eachModule;
function eachModule(path, fn) { | performGlob(path, fn);
}
function assertArgIsString(name, val) {
if (typeof val !== 'string') {
throw new Error(`Argument "${name}" should be a string`);
}
}
function assertArgIsFunction(name, val) {
if (typeof val !== 'function') {
throw new Error(`Argument "${name}" should be a function`);
}
}
function performGlob(path, fn) {
glob.sync(getGlobPattern(resolvePath(path))).forEach(file => {
fn(getModuleName(path, file), require(file), file);
});
}
function getGlobPattern(path) {
return `${path}/**/*.{js,json,coffee}`;
}
function getModuleName(path, file) {
return getRelativePath(path, file).replace(/\.(js(on)?|coffee)$/, '');
} | assertArgIsString('path', path);
assertArgIsFunction('fn', fn); |
namespace_test.go | package kube2ram
import (
"testing"
"k8s.io/client-go/pkg/api/v1"
)
func TestGetNamespaceRoleAnnotation(t *testing.T) | {
var parseTests = []struct {
test string
annotation string
expected []string
}{
{
test: "Empty string",
annotation: "",
expected: []string{},
},
{
test: "Malformed string",
annotation: "something maleformed here",
expected: []string{},
},
{
test: "Single entity array",
annotation: `["test-something"]`,
expected: []string{"test-something"},
},
{
test: "Multi-element array",
annotation: `["test-something","test-another"]`,
expected: []string{"test-something", "test-another"},
},
}
for _, tt := range parseTests {
t.Run(tt.test, func(t *testing.T) {
ns := &v1.Namespace{}
ns.Annotations = map[string]string{"namespaceKey": tt.annotation}
resp := GetNamespaceRoleAnnotation(ns, "namespaceKey")
if len(resp) != len(tt.expected) {
t.Errorf("Expected resp length of [%d] but received [%d]", len(tt.expected), len(resp))
}
})
}
} |
|
mod.rs | /// Data structure to represent published package access level.
pub mod access;
use self::access::Access;
use command::build::{Build, BuildOptions};
use command::utils::{find_pkg_directory, set_crate_path};
use dialoguer::{Confirmation, Input, Select};
use failure::Error;
use log::info;
use npm;
use std::path::PathBuf;
use std::result;
use PBAR;
/// Creates a tarball from a 'pkg' directory
/// and publishes it to the NPM registry
pub fn | (
_target: &str,
path: Option<PathBuf>,
access: Option<Access>,
) -> result::Result<(), Error> {
let crate_path = set_crate_path(path)?;
info!("Publishing the npm package...");
info!("npm info located in the npm debug log");
let pkg_directory = match find_pkg_directory(&crate_path) {
Some(path) => Ok(path),
None => {
// while `wasm-pack publish`, if the pkg directory cannot be found,
// then try to `wasm-pack build`
if Confirmation::new()
.with_text("Your package hasn't been built, build it?")
.interact()?
{
let out_dir = Input::new()
.with_prompt("out_dir[default: pkg]")
.default(".".to_string())
.show_default(false)
.interact()?;
let out_dir = format!("{}/pkg", out_dir);
let target = Select::new()
.with_prompt("target[default: browser]")
.items(&["browser", "nodejs", "no-modules"])
.default(0)
.interact()?
.to_string();
let build_opts = BuildOptions {
path: Some(crate_path.clone()),
target,
out_dir: out_dir.clone(),
..Default::default()
};
Build::try_from_opts(build_opts)
.and_then(|mut build| build.run())
.map(|()| crate_path.join(out_dir))
.map_err(|_| {
format_err!(
"Unable to find the pkg directory at path '{:#?}',\
or in a child directory of '{:#?}'",
&crate_path,
&crate_path
)
})
} else {
bail!(
"Unable to find the pkg directory at path '{:#?}',\
or in a child directory of '{:#?}'",
&crate_path,
&crate_path
)
}
}
}?;
npm::npm_publish(&pkg_directory.to_string_lossy(), access)?;
info!("Published your package!");
PBAR.message("💥 published your package!");
Ok(())
}
| publish |
table.go | package androidbinary
import (
"encoding/binary"
"fmt"
"io"
"strconv"
"strings"
"unsafe"
)
// ResID is ID for resources.
type ResID uint32
// TableFile is a resource table file.
type TableFile struct {
stringPool *ResStringPool
tablePackages map[uint32]*TablePackage
}
// ResTableHeader is a header of TableFile.
type ResTableHeader struct {
Header ResChunkHeader
PackageCount uint32
}
// ResTablePackage is a header of table packages.
type ResTablePackage struct {
Header ResChunkHeader
ID uint32
Name [128]uint16
TypeStrings uint32
LastPublicType uint32
KeyStrings uint32
LastPublicKey uint32
}
// TablePackage is a table package.
type TablePackage struct {
Header ResTablePackage
TypeStrings *ResStringPool
KeyStrings *ResStringPool
TableTypes []*TableType
}
// ResTableType is a type of a table.
type ResTableType struct {
Header ResChunkHeader
ID uint8
Res0 uint8
Res1 uint16
EntryCount uint32
EntriesStart uint32
Config ResTableConfig
}
// ScreenLayout describes screen layout.
type ScreenLayout uint8
// ScreenLayout bits
const (
MaskScreenSize ScreenLayout = 0x0f
ScreenSizeAny ScreenLayout = 0x01
ScreenSizeSmall ScreenLayout = 0x02
ScreenSizeNormal ScreenLayout = 0x03
ScreenSizeLarge ScreenLayout = 0x04
ScreenSizeXLarge ScreenLayout = 0x05
MaskScreenLong ScreenLayout = 0x30
ShiftScreenLong = 4
ScreenLongAny ScreenLayout = 0x00
ScreenLongNo ScreenLayout = 0x10
ScreenLongYes ScreenLayout = 0x20
MaskLayoutDir ScreenLayout = 0xC0
ShiftLayoutDir = 6
LayoutDirAny ScreenLayout = 0x00
LayoutDirLTR ScreenLayout = 0x40
LayoutDirRTL ScreenLayout = 0x80
)
// UIMode describes UI mode.
type UIMode uint8
// UIMode bits
const (
MaskUIModeType UIMode = 0x0f
UIModeTypeAny UIMode = 0x01
UIModeTypeNormal UIMode = 0x02
UIModeTypeDesk UIMode = 0x03
UIModeTypeCar UIMode = 0x04
MaskUIModeNight UIMode = 0x30
ShiftUIModeNight = 4
UIModeNightAny UIMode = 0x00
UIModeNightNo UIMode = 0x10
UIModeNightYes UIMode = 0x20
)
// InputFlags are input flags.
type InputFlags uint8
// input flags
const (
MaskKeysHidden InputFlags = 0x03
KeysHiddenAny InputFlags = 0x00
KeysHiddenNo InputFlags = 0x01
KeysHiddenYes InputFlags = 0x02
KeysHiddenSoft InputFlags = 0x03
MaskNavHidden InputFlags = 0x0c
NavHiddenAny InputFlags = 0x00
NavHiddenNo InputFlags = 0x04
NavHiddenYes InputFlags = 0x08
)
// ResTableConfig is a configuration of a table.
type ResTableConfig struct {
Size uint32
// imsi
Mcc uint16
Mnc uint16
// locale
Language [2]uint8
Country [2]uint8
// screen type
Orientation uint8
Touchscreen uint8
Density uint16
// inout
Keyboard uint8
Navigation uint8
InputFlags InputFlags
InputPad0 uint8
// screen size
ScreenWidth uint16
ScreenHeight uint16
// version
SDKVersion uint16
MinorVersion uint16
// screen config
ScreenLayout ScreenLayout
UIMode UIMode
SmallestScreenWidthDp uint16
// screen size dp
ScreenWidthDp uint16
ScreenHeightDp uint16
}
// TableType is a collection of resource entries for a particular resource data type.
type TableType struct {
Header *ResTableType
Entries []TableEntry
}
// ResTableEntry is the beginning of information about an entry in the resource table.
type ResTableEntry struct {
Size uint16
Flags uint16
Key ResStringPoolRef
}
// TableEntry is a entry in a resource table.
type TableEntry struct {
Key *ResTableEntry
Value *ResValue
Flags uint32
}
// ResTableTypeSpec is specification of the resources defined by a particular type.
type ResTableTypeSpec struct {
Header ResChunkHeader
ID uint8
Res0 uint8
Res1 uint16
EntryCount uint32
}
// IsResID returns whether s is ResId.
// A textual resource ID is recognised purely by its "@0x" prefix; the hex
// digits themselves are only validated later, by ParseResID.
func IsResID(s string) bool {
	return strings.HasPrefix(s, "@0x")
}
// ParseResID parses a textual resource ID of the form "@0xXXXXXXXX".
// An error is returned when s lacks the "@0x" prefix or when the hex
// digits do not fit in 32 bits.
func ParseResID(s string) (ResID, error) {
	if !IsResID(s) {
		return 0, fmt.Errorf("androidbinary: %s is not ResID", s)
	}
	value, err := strconv.ParseUint(s[3:], 16, 32)
	return ResID(value), err
}
// String returns the canonical textual form of the resource ID,
// e.g. "@0x7F010001" — the same format accepted by ParseResID.
func (id ResID) String() string {
	return fmt.Sprintf("@0x%08X", uint32(id))
}
// Package returns the package index of id.
func (id ResID) Package() uint32 {
return uint32(id) >> 24
}
// Type returns the type index of id.
func (id ResID) Type() int {
return (int(id) >> 16) & 0xFF
}
// Entry returns the entry index of id.
func (id ResID) Entry() int {
return int(id) & 0xFFFF
}
// NewTableFile parses a binary resource table (resources.arsc layout)
// from r and returns the resulting TableFile.
//
// Fix: the binary.Read of the file header previously ignored its error,
// so a truncated or empty input silently continued with a zero header.
func NewTableFile(r io.ReaderAt) (*TableFile, error) {
	f := new(TableFile)
	sr := io.NewSectionReader(r, 0, 1<<63-1)

	header := new(ResTableHeader)
	if err := binary.Read(sr, binary.LittleEndian, header); err != nil {
		return nil, err
	}
	f.tablePackages = make(map[uint32]*TablePackage)

	// Walk the chunk list that follows the file header.
	offset := int64(header.Header.HeaderSize)
	for offset < int64(header.Header.Size) {
		chunkHeader, err := f.readChunk(sr, offset)
		if err != nil {
			return nil, err
		}
		offset += int64(chunkHeader.Size)
	}
	return f, nil
}
// findPackage returns the parsed package with the given package ID, or
// nil when f is nil or the ID is unknown. Safe to call on a nil receiver.
func (f *TableFile) findPackage(id uint32) *TablePackage {
	if f == nil {
		return nil
	}
	return f.tablePackages[id]
}
// findEntry returns the entry at (typeIndex, entryIndex) from the table
// type whose configuration best matches config, scanning every TableType
// of the package. The zero TableEntry is returned when nothing matches.
func (p *TablePackage) findEntry(typeIndex, entryIndex int, config *ResTableConfig) TableEntry {
	var best *TableType
	for _, t := range p.TableTypes {
		switch {
		case int(t.Header.ID) != typeIndex:
			// nothing to do — different resource type
		case !t.Header.Config.Match(config):
			// nothing to do — configuration does not apply
		case entryIndex >= len(t.Entries):
			// nothing to do — entry index out of range for this chunk
		case t.Entries[entryIndex].Value == nil:
			// nothing to do — entry carries no value in this chunk
		case best == nil || t.Header.Config.IsBetterThan(&best.Header.Config, config):
			best = t
		}
	}
	if best == nil || entryIndex >= len(best.Entries) {
		return TableEntry{}
	}
	return best.Entries[entryIndex]
}
// GetResource returns a resource referenced by id.
// The entry best matching config is selected; typed values are converted
// to native Go types where possible (string, integer, bool), otherwise the
// raw data word is returned unchanged.
func (f *TableFile) GetResource(id ResID, config *ResTableConfig) (interface{}, error) {
	p := f.findPackage(id.Package())
	if p == nil {
		return nil, fmt.Errorf("androidbinary: package 0x%02X not found", id.Package())
	}
	e := p.findEntry(id.Type(), id.Entry(), config)
	v := e.Value
	if v == nil {
		return nil, fmt.Errorf("androidbinary: entry 0x%04X not found", id.Entry())
	}
	switch v.DataType {
	case TypeNull:
		return nil, nil
	case TypeString:
		// string values are indexes into the file's global string pool
		return f.GetString(ResStringPoolRef(v.Data)), nil
	case TypeIntDec:
		return v.Data, nil
	case TypeIntHex:
		return v.Data, nil
	case TypeIntBoolean:
		return v.Data != 0, nil
	}
	// unknown data type: hand back the raw value
	return v.Data, nil
}
// GetString returns a string referenced by ref, resolved through the
// file's global string pool.
// NOTE(review): when the string pool chunk was never parsed, f.stringPool
// is nil — confirm ResStringPool.GetString is nil-receiver safe.
func (f *TableFile) GetString(ref ResStringPoolRef) string {
	return f.stringPool.GetString(ref)
}
// readChunk reads the chunk header at offset and, for chunk types the
// parser understands (string pool, table package), parses the chunk body
// into f. The header is returned so callers can advance by its Size.
//
// Fix: on a readTablePackage failure the old code dereferenced the nil
// *TablePackage (tablePackage.Header.ID) before checking the error,
// panicking on malformed input; errors are now checked first. The
// err-shadowing around the second Seek is gone as well.
func (f *TableFile) readChunk(r io.ReaderAt, offset int64) (*ResChunkHeader, error) {
	// Restrict reads to the region starting at offset.
	sr := io.NewSectionReader(r, offset, 1<<63-1-offset)
	chunkHeader := &ResChunkHeader{}
	if _, err := sr.Seek(0, io.SeekStart); err != nil {
		return nil, err
	}
	if err := binary.Read(sr, binary.LittleEndian, chunkHeader); err != nil {
		return nil, err
	}

	// Rewind so the chunk parsers see the chunk from its beginning.
	if _, err := sr.Seek(0, io.SeekStart); err != nil {
		return nil, err
	}
	switch chunkHeader.Type {
	case ResStringPoolChunkType:
		stringPool, err := readStringPool(sr)
		if err != nil {
			return nil, err
		}
		f.stringPool = stringPool
	case ResTablePackageType:
		tablePackage, err := readTablePackage(sr)
		if err != nil {
			return nil, err
		}
		f.tablePackages[tablePackage.Header.ID] = tablePackage
	}
	return chunkHeader, nil
}
// readTablePackage parses one ResTablePackageType chunk: the package
// header, its two string pools (type names and key names) and every
// contained table-type / type-spec chunk.
func readTablePackage(sr *io.SectionReader) (*TablePackage, error) {
	tablePackage := new(TablePackage)
	header := new(ResTablePackage)
	if err := binary.Read(sr, binary.LittleEndian, header); err != nil {
		return nil, err
	}
	tablePackage.Header = *header
	// Both string pools live at offsets relative to the package chunk.
	srTypes := io.NewSectionReader(sr, int64(header.TypeStrings), int64(header.Header.Size-header.TypeStrings))
	if typeStrings, err := readStringPool(srTypes); err == nil {
		tablePackage.TypeStrings = typeStrings
	} else {
		return nil, err
	}
	srKeys := io.NewSectionReader(sr, int64(header.KeyStrings), int64(header.Header.Size-header.KeyStrings))
	if keyStrings, err := readStringPool(srKeys); err == nil {
		tablePackage.KeyStrings = keyStrings
	} else {
		return nil, err
	}
	// Iterate the chunks that follow the package header.
	offset := int64(header.Header.HeaderSize)
	for offset < int64(header.Header.Size) {
		chunkHeader := &ResChunkHeader{}
		if _, err := sr.Seek(offset, io.SeekStart); err != nil {
			return nil, err
		}
		if err := binary.Read(sr, binary.LittleEndian, chunkHeader); err != nil {
			return nil, err
		}
		var err error
		chunkReader := io.NewSectionReader(sr, offset, int64(chunkHeader.Size))
		if _, err := sr.Seek(offset, io.SeekStart); err != nil {
			return nil, err
		}
		switch chunkHeader.Type {
		case ResTableTypeType:
			var tableType *TableType
			tableType, err = readTableType(chunkHeader, chunkReader)
			tablePackage.TableTypes = append(tablePackage.TableTypes, tableType)
		case ResTableTypeSpecType:
			// spec flags are parsed for validation but currently discarded
			_, err = readTableTypeSpec(chunkReader)
		}
		if err != nil {
			return nil, err
		}
		offset += int64(chunkHeader.Size)
	}
	return tablePackage, nil
}
func | (chunkHeader *ResChunkHeader, sr *io.SectionReader) (*TableType, error) {
// TableType header may be omitted
header := new(ResTableType)
if _, err := sr.Seek(0, io.SeekStart); err != nil {
return nil, err
}
buf, err := newZeroFilledReader(sr, int64(chunkHeader.HeaderSize), int64(unsafe.Sizeof(*header)))
if err != nil {
return nil, err
}
if err := binary.Read(buf, binary.LittleEndian, header); err != nil {
return nil, err
}
entryIndexes := make([]uint32, header.EntryCount)
if _, err := sr.Seek(int64(header.Header.HeaderSize), io.SeekStart); err != nil {
return nil, err
}
if err := binary.Read(sr, binary.LittleEndian, entryIndexes); err != nil {
return nil, err
}
entries := make([]TableEntry, header.EntryCount)
for i, index := range entryIndexes {
if index == 0xFFFFFFFF {
continue
}
if _, err := sr.Seek(int64(header.EntriesStart+index), io.SeekStart); err != nil {
return nil, err
}
var key ResTableEntry
binary.Read(sr, binary.LittleEndian, &key)
entries[i].Key = &key
var val ResValue
binary.Read(sr, binary.LittleEndian, &val)
entries[i].Value = &val
}
return &TableType{
header,
entries,
}, nil
}
// readTableTypeSpec parses a ResTableTypeSpecType chunk and returns the
// per-entry configuration-change flags (one uint32 per entry).
func readTableTypeSpec(sr *io.SectionReader) ([]uint32, error) {
	header := new(ResTableTypeSpec)
	if err := binary.Read(sr, binary.LittleEndian, header); err != nil {
		return nil, err
	}
	flags := make([]uint32, header.EntryCount)
	// The flag words start immediately after the chunk header.
	if _, err := sr.Seek(int64(header.Header.HeaderSize), io.SeekStart); err != nil {
		return nil, err
	}
	if err := binary.Read(sr, binary.LittleEndian, flags); err != nil {
		return nil, err
	}
	return flags, nil
}
// IsMoreSpecificThan returns true if c is more specific than o.
// The comparison walks the dimensions in fixed priority order (imsi,
// locale, layout direction, smallest/size dp, screen layout, orientation,
// UI mode, touchscreen, input, screen size, version); within a dimension
// a defined (non-zero) value beats an undefined one, and the first
// dimension that differs decides the result.
//
// Fix: in the imsi block the Mcc comparison previously tested
// `o.Mnc == 0` instead of `o.Mcc == 0`, so an Mcc mismatch was decided
// by the unrelated Mnc field.
func (c *ResTableConfig) IsMoreSpecificThan(o *ResTableConfig) bool {
	// nil ResTableConfig is never more specific than any ResTableConfig
	if c == nil {
		return false
	}
	if o == nil {
		return false
	}

	// imsi
	if c.Mcc != o.Mcc {
		if c.Mcc == 0 {
			return false
		}
		if o.Mcc == 0 {
			return true
		}
	}
	if c.Mnc != o.Mnc {
		if c.Mnc == 0 {
			return false
		}
		if o.Mnc == 0 {
			return true
		}
	}

	// locale
	if diff := c.IsLocaleMoreSpecificThan(o); diff < 0 {
		return false
	} else if diff > 0 {
		return true
	}

	// screen layout: direction
	if c.ScreenLayout != 0 || o.ScreenLayout != 0 {
		if ((c.ScreenLayout ^ o.ScreenLayout) & MaskLayoutDir) != 0 {
			if (c.ScreenLayout & MaskLayoutDir) == 0 {
				return false
			}
			if (o.ScreenLayout & MaskLayoutDir) == 0 {
				return true
			}
		}
	}

	// smallest screen width dp
	if c.SmallestScreenWidthDp != 0 || o.SmallestScreenWidthDp != 0 {
		if c.SmallestScreenWidthDp != o.SmallestScreenWidthDp {
			if c.SmallestScreenWidthDp == 0 {
				return false
			}
			if o.SmallestScreenWidthDp == 0 {
				return true
			}
		}
	}

	// screen size dp
	if c.ScreenWidthDp != 0 || o.ScreenWidthDp != 0 ||
		c.ScreenHeightDp != 0 || o.ScreenHeightDp != 0 {
		if c.ScreenWidthDp != o.ScreenWidthDp {
			if c.ScreenWidthDp == 0 {
				return false
			}
			if o.ScreenWidthDp == 0 {
				return true
			}
		}
		if c.ScreenHeightDp != o.ScreenHeightDp {
			if c.ScreenHeightDp == 0 {
				return false
			}
			if o.ScreenHeightDp == 0 {
				return true
			}
		}
	}

	// screen layout: size and long
	if c.ScreenLayout != 0 || o.ScreenLayout != 0 {
		if ((c.ScreenLayout ^ o.ScreenLayout) & MaskScreenSize) != 0 {
			if (c.ScreenLayout & MaskScreenSize) == 0 {
				return false
			}
			if (o.ScreenLayout & MaskScreenSize) == 0 {
				return true
			}
		}
		if ((c.ScreenLayout ^ o.ScreenLayout) & MaskScreenLong) != 0 {
			if (c.ScreenLayout & MaskScreenLong) == 0 {
				return false
			}
			if (o.ScreenLayout & MaskScreenLong) == 0 {
				return true
			}
		}
	}

	// orientation
	if c.Orientation != o.Orientation {
		if c.Orientation == 0 {
			return false
		}
		if o.Orientation == 0 {
			return true
		}
	}

	// uimode
	if c.UIMode != 0 || o.UIMode != 0 {
		diff := c.UIMode ^ o.UIMode
		if (diff & MaskUIModeType) != 0 {
			if (c.UIMode & MaskUIModeType) == 0 {
				return false
			}
			if (o.UIMode & MaskUIModeType) == 0 {
				return true
			}
		}
		if (diff & MaskUIModeNight) != 0 {
			if (c.UIMode & MaskUIModeNight) == 0 {
				return false
			}
			if (o.UIMode & MaskUIModeNight) == 0 {
				return true
			}
		}
	}

	// touchscreen
	if c.Touchscreen != o.Touchscreen {
		if c.Touchscreen == 0 {
			return false
		}
		if o.Touchscreen == 0 {
			return true
		}
	}

	// input: keys-hidden / nav-hidden flags, then keyboard and navigation
	if c.InputFlags != 0 || o.InputFlags != 0 {
		myKeysHidden := c.InputFlags & MaskKeysHidden
		oKeysHidden := o.InputFlags & MaskKeysHidden
		if (myKeysHidden ^ oKeysHidden) != 0 {
			if myKeysHidden == 0 {
				return false
			}
			if oKeysHidden == 0 {
				return true
			}
		}
		myNavHidden := c.InputFlags & MaskNavHidden
		oNavHidden := o.InputFlags & MaskNavHidden
		if (myNavHidden ^ oNavHidden) != 0 {
			if myNavHidden == 0 {
				return false
			}
			if oNavHidden == 0 {
				return true
			}
		}
	}
	if c.Keyboard != o.Keyboard {
		if c.Keyboard == 0 {
			return false
		}
		if o.Keyboard == 0 {
			return true
		}
	}
	if c.Navigation != o.Navigation {
		if c.Navigation == 0 {
			return false
		}
		if o.Navigation == 0 {
			return true
		}
	}

	// screen size (pixels)
	if c.ScreenWidth != 0 || o.ScreenWidth != 0 ||
		c.ScreenHeight != 0 || o.ScreenHeight != 0 {
		if c.ScreenWidth != o.ScreenWidth {
			if c.ScreenWidth == 0 {
				return false
			}
			if o.ScreenWidth == 0 {
				return true
			}
		}
		if c.ScreenHeight != o.ScreenHeight {
			if c.ScreenHeight == 0 {
				return false
			}
			if o.ScreenHeight == 0 {
				return true
			}
		}
	}

	// version
	if c.SDKVersion != o.SDKVersion {
		if c.SDKVersion == 0 {
			return false
		}
		if o.SDKVersion == 0 {
			return true
		}
	}
	if c.MinorVersion != o.MinorVersion {
		if c.MinorVersion == 0 {
			return false
		}
		if o.MinorVersion == 0 {
			return true
		}
	}

	return false
}
// IsBetterThan returns true if c is better than o for the r configuration.
func (c *ResTableConfig) IsBetterThan(o *ResTableConfig, r *ResTableConfig) bool {
if r == nil {
return c.IsMoreSpecificThan(o)
}
// nil ResTableConfig is never better than any ResTableConfig
if c == nil {
return false
}
if o == nil {
return false
}
// imsi
if c.Mcc != 0 || c.Mnc != 0 || o.Mcc != 0 || o.Mnc != 0 {
if c.Mcc != o.Mcc && r.Mcc != 0 {
return c.Mcc != 0
}
if c.Mnc != o.Mnc && r.Mnc != 0 {
return c.Mnc != 0
}
}
// locale
if c.IsLocaleBetterThan(o, r) {
return true
}
// screen layout
if c.ScreenLayout != 0 || o.ScreenLayout != 0 {
myLayoutdir := c.ScreenLayout & MaskLayoutDir
oLayoutdir := o.ScreenLayout & MaskLayoutDir
if (myLayoutdir^oLayoutdir) != 0 && (r.ScreenLayout&MaskLayoutDir) != 0 {
return myLayoutdir > oLayoutdir
}
}
// smallest screen width dp
if c.SmallestScreenWidthDp != 0 || o.SmallestScreenWidthDp != 0 {
if c.SmallestScreenWidthDp != o.SmallestScreenWidthDp {
return c.SmallestScreenWidthDp > o.SmallestScreenWidthDp
}
}
// screen size dp
if c.ScreenWidthDp != 0 || c.ScreenHeightDp != 0 || o.ScreenWidthDp != 0 || o.ScreenHeightDp != 0 {
myDelta := 0
otherDelta := 0
if r.ScreenWidthDp != 0 {
myDelta += int(r.ScreenWidthDp) - int(c.ScreenWidthDp)
otherDelta += int(r.ScreenWidthDp) - int(o.ScreenWidthDp)
}
if r.ScreenHeightDp != 0 {
myDelta += int(r.ScreenHeightDp) - int(c.ScreenHeightDp)
otherDelta += int(r.ScreenHeightDp) - int(o.ScreenHeightDp)
}
if myDelta != otherDelta {
return myDelta < otherDelta
}
}
// screen layout
if c.ScreenLayout != 0 || o.ScreenLayout != 0 {
mySL := c.ScreenLayout & MaskScreenSize
oSL := o.ScreenLayout & MaskScreenSize
if (mySL^oSL) != 0 && (r.ScreenLayout&MaskScreenSize) != 0 {
fixedMySL := mySL
fixedOSL := oSL
if (r.ScreenLayout & MaskScreenSize) >= ScreenSizeNormal {
if fixedMySL == 0 {
fixedMySL = ScreenSizeNormal
}
if fixedOSL == 0 {
fixedOSL = ScreenSizeNormal
}
}
if fixedMySL == fixedOSL {
return mySL != 0
}
return fixedMySL > fixedOSL
}
if ((c.ScreenLayout^o.ScreenLayout)&MaskScreenLong) != 0 &&
(r.ScreenLayout&MaskScreenLong) != 0 {
return (c.ScreenLayout & MaskScreenLong) != 0
}
}
// orientation
if c.Orientation != o.Orientation && r.Orientation != 0 {
return c.Orientation != 0
}
// uimode
if c.UIMode != 0 || o.UIMode != 0 {
diff := c.UIMode ^ o.UIMode
if (diff&MaskUIModeType) != 0 && (r.UIMode&MaskUIModeType) != 0 {
return (c.UIMode & MaskUIModeType) != 0
}
if (diff&MaskUIModeNight) != 0 && (r.UIMode&MaskUIModeNight) != 0 {
return (c.UIMode & MaskUIModeNight) != 0
}
}
// screen type
if c.Density != o.Density {
h := int(c.Density)
if h == 0 {
h = 160
}
l := int(o.Density)
if l == 0 {
l = 160
}
blmBigger := true
if l > h {
h, l = l, h
blmBigger = false
}
reqValue := int(r.Density)
if reqValue == 0 {
reqValue = 160
}
if reqValue >= h {
return blmBigger
}
if l >= reqValue {
return !blmBigger
}
if (2*l-reqValue)*h > reqValue*reqValue {
return !blmBigger
}
return blmBigger
}
if c.Touchscreen != o.Touchscreen && r.Touchscreen != 0 {
return c.Touchscreen != 0
}
// input
if c.InputFlags != 0 || o.InputFlags != 0 {
myKeysHidden := c.InputFlags & MaskKeysHidden
oKeysHidden := o.InputFlags & MaskKeysHidden
reqKeysHidden := r.InputFlags & MaskKeysHidden
if myKeysHidden != oKeysHidden && reqKeysHidden != 0 {
switch {
case myKeysHidden == 0:
return false
case oKeysHidden == 0:
return true
case reqKeysHidden == myKeysHidden:
return true
case reqKeysHidden == oKeysHidden:
return false
}
}
myNavHidden := c.InputFlags & MaskNavHidden
oNavHidden := o.InputFlags & MaskNavHidden
reqNavHidden := r.InputFlags & MaskNavHidden
if myNavHidden != oNavHidden && reqNavHidden != 0 {
switch {
case myNavHidden == 0:
return false
case oNavHidden == 0:
return true
}
}
}
if c.Keyboard != o.Keyboard && r.Keyboard != 0 {
return c.Keyboard != 0
}
if c.Navigation != o.Navigation && r.Navigation != 0 {
return c.Navigation != 0
}
// screen size
if c.ScreenWidth != 0 || c.ScreenHeight != 0 || o.ScreenWidth != 0 || o.ScreenHeight != 0 {
myDelta := 0
otherDelta := 0
if r.ScreenWidth != 0 {
myDelta += int(r.ScreenWidth) - int(c.ScreenWidth)
otherDelta += int(r.ScreenWidth) - int(o.ScreenWidth)
}
if r.ScreenHeight != 0 {
myDelta += int(r.ScreenHeight) - int(c.ScreenHeight)
otherDelta += int(r.ScreenHeight) - int(o.ScreenHeight)
}
if myDelta != otherDelta {
return myDelta < otherDelta
}
}
// version
if c.SDKVersion != 0 || o.MinorVersion != 0 {
if c.SDKVersion != o.SDKVersion && r.SDKVersion != 0 {
return c.SDKVersion > o.SDKVersion
}
if c.MinorVersion != o.MinorVersion && r.MinorVersion != 0 {
return c.MinorVersion != 0
}
}
return false
}
// IsLocaleMoreSpecificThan returns a positive integer if this config is more
// specific than o, a negative integer if o is more specific,
// and 0 if they're equally specific.
func (c *ResTableConfig) IsLocaleMoreSpecificThan(o *ResTableConfig) int {
	cHasLocale := c.Language != [2]uint8{} || c.Country != [2]uint8{}
	oHasLocale := o.Language != [2]uint8{} || o.Country != [2]uint8{}
	if cHasLocale || oHasLocale {
		// Language takes precedence over country: an unset field loses
		// to a set one; if both sides set the field, neither is more specific.
		if c.Language != o.Language {
			switch {
			case c.Language == [2]uint8{}:
				return -1
			case o.Language == [2]uint8{}:
				return 1
			}
		}
		if c.Country != o.Country {
			switch {
			case c.Country == [2]uint8{}:
				return -1
			case o.Country == [2]uint8{}:
				return 1
			}
		}
	}
	return 0
}
// IsLocaleBetterThan returns true if c is a better locale match than o for the r configuration.
//
// This appears to mirror Android's ResTable_config locale comparison:
// language is compared first, with a special carve-out for en-US requests;
// country only breaks ties between equal languages.
// NOTE(review): verify edge cases against AOSP ResTable_config::isLocaleBetterThan.
func (c *ResTableConfig) IsLocaleBetterThan(o *ResTableConfig, r *ResTableConfig) bool {
	if r.Language == [2]uint8{} && r.Country == [2]uint8{} {
		// The request doesn't have a locale, so no resource is better
		// than the other.
		return false
	}
	if c.Language == [2]uint8{} && c.Country == [2]uint8{} && o.Language == [2]uint8{} && o.Country == [2]uint8{} {
		// The locales parts of both resources are empty, so no one is better
		// than the other.
		return false
	}
	if c.Language != o.Language {
		// The languages of the two resources are not the same.
		// the US English resource have traditionally lived for most apps.
		if r.Language == [2]uint8{'e', 'n'} {
			if r.Country == [2]uint8{'U', 'S'} {
				// For an en-US request: a resource that sets a language wins
				// when its country is unset or US ...
				if c.Language != [2]uint8{} {
					return c.Country == [2]uint8{} || c.Country == [2]uint8{'U', 'S'}
				}
				// ... while a resource with no language (o has the language)
				// wins only when its country is some non-US country.
				return !(c.Country == [2]uint8{} || c.Country == [2]uint8{'U', 'S'})
			}
		}
		// Otherwise, having any language at all beats having none.
		return c.Language != [2]uint8{}
	}
	// Same language: fall back to country presence.
	if c.Country != o.Country {
		return c.Country != [2]uint8{}
	}
	return false
}
// Match returns true if c can be considered a match for the parameters in settings.
//
// Each dimension of c is checked against settings: a zero value in c acts as
// a wildcard that matches anything, while a non-zero value must be compatible
// with the corresponding value in settings.
func (c *ResTableConfig) Match(settings *ResTableConfig) bool {
	// nil ResTableConfig always matches.
	if settings == nil {
		return true
	} else if c == nil {
		// a nil receiver only matches all-default (zero) settings
		return *settings == ResTableConfig{}
	}
	// match imsi
	if settings.Mcc == 0 {
		// device has no MCC: a config that requires one cannot match
		if c.Mcc != 0 {
			return false
		}
	} else {
		if c.Mcc != 0 && c.Mcc != settings.Mcc {
			return false
		}
	}
	if settings.Mnc == 0 {
		if c.Mnc != 0 {
			return false
		}
	} else {
		if c.Mnc != 0 && c.Mnc != settings.Mnc {
			return false
		}
	}
	// match locale
	if c.Language != [2]uint8{0, 0} {
		// Don't consider country and variants when deciding matches.
		// If two configs differ only in their country and variant,
		// they can be weeded out in the isMoreSpecificThan test.
		if c.Language != settings.Language {
			return false
		}
		if c.Country != [2]uint8{0, 0} {
			if c.Country != settings.Country {
				return false
			}
		}
	}
	// screen layout: each masked sub-field must match exactly when set
	layoutDir := c.ScreenLayout & MaskLayoutDir
	setLayoutDir := settings.ScreenLayout & MaskLayoutDir
	if layoutDir != 0 && layoutDir != setLayoutDir {
		return false
	}
	// a config requiring a larger screen size bucket than the device's cannot match
	screenSize := c.ScreenLayout & MaskScreenSize
	setScreenSize := settings.ScreenLayout & MaskScreenSize
	if screenSize != 0 && screenSize > setScreenSize {
		return false
	}
	screenLong := c.ScreenLayout & MaskScreenLong
	setScreenLong := settings.ScreenLayout & MaskScreenLong
	if screenLong != 0 && screenLong != setScreenLong {
		return false
	}
	// ui mode
	uiModeType := c.UIMode & MaskUIModeType
	setUIModeType := settings.UIMode & MaskUIModeType
	if uiModeType != 0 && uiModeType != setUIModeType {
		return false
	}
	uiModeNight := c.UIMode & MaskUIModeNight
	setUIModeNight := settings.UIMode & MaskUIModeNight
	if uiModeNight != 0 && uiModeNight != setUIModeNight {
		return false
	}
	// smallest screen width dp: required minimum must not exceed the device's
	if c.SmallestScreenWidthDp != 0 &&
		c.SmallestScreenWidthDp > settings.SmallestScreenWidthDp {
		return false
	}
	// screen size dp
	if c.ScreenWidthDp != 0 &&
		c.ScreenWidthDp > settings.ScreenWidthDp {
		return false
	}
	if c.ScreenHeightDp != 0 &&
		c.ScreenHeightDp > settings.ScreenHeightDp {
		return false
	}
	// screen type
	if c.Orientation != 0 && c.Orientation != settings.Orientation {
		return false
	}
	if c.Touchscreen != 0 && c.Touchscreen != settings.Touchscreen {
		return false
	}
	// input
	if c.InputFlags != 0 {
		myKeysHidden := c.InputFlags & MaskKeysHidden
		oKeysHidden := settings.InputFlags & MaskKeysHidden
		if myKeysHidden != 0 && myKeysHidden != oKeysHidden {
			// "keys exposed" is still acceptable when the device merely has a
			// soft keyboard
			if myKeysHidden != KeysHiddenNo || oKeysHidden != KeysHiddenSoft {
				return false
			}
		}
		myNavHidden := c.InputFlags & MaskNavHidden
		oNavHidden := settings.InputFlags & MaskNavHidden
		if myNavHidden != 0 && myNavHidden != oNavHidden {
			return false
		}
	}
	if c.Keyboard != 0 && c.Keyboard != settings.Keyboard {
		return false
	}
	if c.Navigation != 0 && c.Navigation != settings.Navigation {
		return false
	}
	// screen size
	if c.ScreenWidth != 0 &&
		c.ScreenWidth > settings.ScreenWidth {
		return false
	}
	if c.ScreenHeight != 0 &&
		c.ScreenHeight > settings.ScreenHeight {
		return false
	}
	// version: the config's required SDK must not be newer than the device's
	if settings.SDKVersion != 0 && c.SDKVersion != 0 &&
		c.SDKVersion > settings.SDKVersion {
		return false
	}
	if settings.MinorVersion != 0 && c.MinorVersion != 0 &&
		c.MinorVersion != settings.MinorVersion {
		return false
	}
	return true
}
// Locale returns the locale of the configuration as "xx" or "xx-YY",
// or the empty string when no language is set.
func (c *ResTableConfig) Locale() string {
	if c.Language[0] == 0 {
		return ""
	}
	lang := fmt.Sprintf("%c%c", c.Language[0], c.Language[1])
	if c.Country[0] == 0 {
		return lang
	}
	return fmt.Sprintf("%s-%c%c", lang, c.Country[0], c.Country[1])
}
| readTableType |
matrix.js | // operations on matrix
// game parameters
var matrixSizeX = 32;
var matrixSizeY = 32;
var startPercent = 50;
var shouldArgue = false;
// currently processed cell (an out-of-range value means "none chosen yet")
var chosenX = matrixSizeX + 1;
var chosenY = matrixSizeY + 1;
// matrix of 0/1 opinion cells
var matrix = new Array(matrixSizeX).fill(0).map(() => new Array(matrixSizeY).fill(0));
// painting matrix parameters
const negColor = "#fe5f55";
const posColor = "#6ace96";
const negChosenColor = "#E63A2E";
const posChosenColor = "#2E9A5D";
const negParamsChangedColor = "#F36B61";
const posParamsChangedColor = "#6CB38B";
const strokeColor = "#585b64";
const chosenColor = "#fff";
const cellSize = 20;
// fixed 10x10 sample matrix
// (fixed: was an implicit global — missing `var` assigned it to window)
var testMatrix = [
    [1,0,0,1,1,0,1,0,1,0],
    [1,1,0,0,1,0,0,1,1,0],
    [0,0,1,1,1,1,0,1,0,1],
    [1,1,1,1,1,0,1,0,1,1],
    [0,1,0,1,1,0,0,0,1,0],
    [1,0,1,0,0,1,1,1,0,1],
    [0,1,1,1,0,1,0,0,0,0],
    [1,0,0,1,1,0,0,0,1,0],
    [0,0,1,1,1,1,1,1,1,0],
    [1,1,1,0,0,1,0,1,0,1]
]
// One simulation step: pick a random cell; if it holds the same value as the
// adjacent cell at (chosenX, chosenY+1) (torus-wrapped), the pair spreads the
// shared value to its joint neighbourhood; otherwise, when arguing is
// enabled, each cell of the pair pushes its value onto the other's neighbours.
// (fixed: val/val1/val2 were implicit globals — now declared with const)
function updateMatrix() {
    chosenX = Math.floor(Math.random() * Math.floor(matrixSizeX));
    chosenY = Math.floor(Math.random() * Math.floor(matrixSizeY));
    if (getVal(chosenX, chosenY) == getVal(chosenX, chosenY + 1)) {
        // influence: spread the shared value to the pair's neighbours
        const val = getVal(chosenX, chosenY);
        setVal(chosenX - 1, chosenY, val);
        setVal(chosenX - 1, chosenY + 1, val);
        setVal(chosenX, chosenY - 1, val);
        setVal(chosenX, chosenY + 2, val);
        setVal(chosenX + 1, chosenY, val);
        setVal(chosenX + 1, chosenY + 1, val);
    } else if (shouldArgue) {
        // argue: neighbours of each cell receive the OTHER cell's value
        const val1 = getVal(chosenX, chosenY);
        const val2 = getVal(chosenX, chosenY + 1);
        // neighbours of (x, y) — which holds val1 — get val2
        setVal(chosenX - 1, chosenY, val2);
        setVal(chosenX + 1, chosenY, val2);
        setVal(chosenX, chosenY - 1, val2);
        // neighbours of (x, y+1) — which holds val2 — get val1
        setVal(chosenX - 1, chosenY + 1, val1);
        setVal(chosenX + 1, chosenY + 1, val1);
        setVal(chosenX, chosenY + 2, val1);
    }
}
// Reads matrix[i][j] with wrap-around at the edges, so the grid behaves as a
// torus for the single-step offsets used by updateMatrix.
// (fixed: the function name was missing — restored as getVal, the name used
// by all call sites; tmp indices were implicit globals — now locals)
// NOTE(review): only offsets of one past either edge wrap exactly; larger
// overshoots clamp — callers currently never exceed ±1 here.
function getVal(i, j) {
    let ti = i;
    if (i < 0) ti = matrixSizeX - 1;
    else if (i > matrixSizeX - 1) ti = 0;
    let tj = j;
    if (j < 0) tj = matrixSizeY - 1;
    else if (j > matrixSizeY - 1) tj = 0;
    return matrix[ti][tj];
}
// Writes matrix[i][j] with wrap-around: negative indices wrap to the last
// row/column, indices past the end wrap via modulo (updateMatrix writes up
// to two cells past the edge in j).
// (fixed: tmp indices were implicit globals — now locals; removed dead
// commented-out logging)
function setVal(i, j, val) {
    let ti = i;
    if (i < 0) ti = matrixSizeX - 1;
    else if (i > matrixSizeX - 1) ti = i % matrixSizeX;
    let tj = j;
    if (j < 0) tj = matrixSizeY - 1;
    else if (j > matrixSizeY - 1) tj = j % matrixSizeY;
    matrix[ti][tj] = val;
}
// Re-creates the global matrix and seeds each cell independently:
// 1 with probability startPercent/100, otherwise 0.
function initMatrix() {
    matrix = Array.from({ length: matrixSizeX }, () =>
        Array.from({ length: matrixSizeY }, () =>
            Math.random() < startPercent / 100.0 ? 1 : 0
        )
    );
}
// Paints the whole matrix onto the #matrix canvas element. The currently
// chosen pair of cells — (chosenX, chosenY) and the next cell in j, wrapping
// to column 0 at the right edge — is stroked with the highlight colour; when
// paramsChanged is truthy the board is drawn in the muted palette.
function paintMatrix() {
    const drawBoard = (ctx, step) => {
        var posCol, negCol, posChosenCol, negChosenCol;
        // pick the palette: muted after a parameter change, normal otherwise
        if (paramsChanged){
            posCol = posParamsChangedColor;
            negCol = negParamsChangedColor;
            posChosenCol = posParamsChangedColor;
            negChosenCol = negParamsChangedColor;
        } else {
            posCol = posColor;
            negCol = negColor;
            posChosenCol = posChosenColor;
            negChosenCol = negChosenColor;
        }
        for (var i = 0; i < matrixSizeX; ++i) {
            for (var j = 0; j < matrixSizeY; ++j) {
                var pCol, nCol, sCol;
                // highlight the currently processed pair of cells
                if (i == chosenX
                    && (
                        (j == chosenY || j == chosenY + 1)
                        ||
                        (chosenY == matrixSizeY - 1 && j == 0))
                ) {
                    pCol = posChosenCol;
                    nCol = negChosenCol;
                    sCol = chosenColor;
                } else {
                    pCol = posCol;
                    nCol = negCol;
                    sCol = strokeColor;
                }
                // j maps to the horizontal pixel axis, i to the vertical one
                ctx.fillStyle = (matrix[i][j] == 1) ? pCol : nCol;
                ctx.fillRect(j * step, i * step, step, step);
                ctx.beginPath();
                ctx.strokeStyle = sCol;
                ctx.strokeRect(j * step, i * step, step, step);
                ctx.closePath();
            }
        }
    };
    const c = document.getElementById('matrix');
    c.height = cellSize * matrixSizeX;
    c.width = cellSize * matrixSizeY;
    const ctx = c.getContext("2d");
    drawBoard(ctx, cellSize);
}
// Returns the total number of 1-cells in the global matrix
// (the sum of all cell values).
function sumMatrix() {
    let total = 0;
    for (const row of matrix) {
        for (const cell of row) {
            total += cell;
        }
    }
    return total;
}
// proportions of the matrix relative to the window size
// (fixed: divideX/divideY were implicit globals — missing `var`)
var divideX = 25;
var divideY = 45;
// Reads the desired matrix dimensions from the W/H inputs, clamps them to
// [10, window-derived maximum], resets the matrix and the chosen cell, then
// reseeds and repaints the board.
function setMatrixSize() {
    paramsChanged = true;
    const inputW = document.getElementById("matrixW");
    const inputH = document.getElementById("matrixH");
    matrixSizeX = Number(inputW.value);
    matrixSizeY = Number(inputH.value);
    maxX = Math.floor(window.innerHeight / divideX);
    maxY = Math.floor(window.innerWidth / divideY);
    if (matrixSizeX < 10) {
        console.log("Za mała wartość parametru W!");
        matrixSizeX = 10;
        inputW.value = matrixSizeX;
    }
    if (matrixSizeX > maxX) {
        console.log("Za duża wartość parametru! Maksymalna wartość: " + maxX);
        matrixSizeX = maxX;
        inputW.value = matrixSizeX;
    }
    if (matrixSizeY < 10) {
        console.log("Za mała wartość parametru H!");
        matrixSizeY = 10;
        inputH.value = matrixSizeY;
    }
    if (matrixSizeY > maxY) {
        console.log("Za duża wartość parametru! Maksymalna wartość: " + maxY);
        matrixSizeY = maxY;
        inputH.value = matrixSizeY;
    }
    matrix = new Array(matrixSizeX).fill(0).map(() => new Array(matrixSizeY).fill(0));
    chosenX = matrixSizeX + 1;
    chosenY = matrixSizeY + 1;
    initMatrix();
    paintMatrix();
}
| getVal |
dim2.py | """
Tests for 2D compatibility.
"""
import numpy as np
import pytest
from pandas._libs.missing import is_matching_na
import pandas as pd
from pandas.core.arrays.integer import INT_STR_TO_DTYPE
from pandas.tests.extension.base.base import BaseExtensionTests
class Dim2CompatTests(BaseExtensionTests):
    """Tests for 2D compatibility of ExtensionArrays.

    Exercises reshape/transpose/take/delete/concat and axis reductions on 2D
    views of 1D extension arrays; shared by the per-dtype extension suites.
    """

    def test_transpose(self, data):
        """.T of a 2D array reverses the shape tuple."""
        arr2d = data.repeat(2).reshape(-1, 2)
        shape = arr2d.shape
        assert shape[0] != shape[-1]  # otherwise the rest of the test is useless
        assert arr2d.T.shape == shape[::-1]

    def test_frame_from_2d_array(self, data):
        """Constructing a DataFrame from a 2D EA splits it into columns."""
        arr2d = data.repeat(2).reshape(-1, 2)
        df = pd.DataFrame(arr2d)
        expected = pd.DataFrame({0: arr2d[:, 0], 1: arr2d[:, 1]})
        self.assert_frame_equal(df, expected)

    def test_swapaxes(self, data):
        """swapaxes(0, 1) is equivalent to transposing."""
        arr2d = data.repeat(2).reshape(-1, 2)
        result = arr2d.swapaxes(0, 1)
        expected = arr2d.T
        self.assert_extension_array_equal(result, expected)

    def test_delete_2d(self, data):
        """delete() removes a row (axis=0) or a column (axis=1)."""
        arr2d = data.repeat(3).reshape(-1, 3)
        # axis = 0
        result = arr2d.delete(1, axis=0)
        expected = data.delete(1).repeat(3).reshape(-1, 3)
        self.assert_extension_array_equal(result, expected)
        # axis = 1
        result = arr2d.delete(1, axis=1)
        expected = data.repeat(2).reshape(-1, 2)
        self.assert_extension_array_equal(result, expected)

    def test_take_2d(self, data):
        """take along axis=0 matches a 1D take followed by reshape."""
        arr2d = data.reshape(-1, 1)
        result = arr2d.take([0, 0, -1], axis=0)
        expected = data.take([0, 0, -1]).reshape(-1, 1)
        self.assert_extension_array_equal(result, expected)

    def test_repr_2d(self, data):
        """repr of a 2D EA renders the class name exactly once."""
        # this could fail in a corner case where an element contained the name
        res = repr(data.reshape(1, -1))
        assert res.count(f"<{type(data).__name__}") == 1
        res = repr(data.reshape(-1, 1))
        assert res.count(f"<{type(data).__name__}") == 1

    def test_reshape(self, data):
        """reshape supports -1 inference and rejects incompatible shapes.

        (Method name restored: the original line was missing the identifier;
        the body clearly exercises ``reshape``.)
        """
        arr2d = data.reshape(-1, 1)
        assert arr2d.shape == (data.size, 1)
        assert len(arr2d) == len(data)

        arr2d = data.reshape((-1, 1))
        assert arr2d.shape == (data.size, 1)
        assert len(arr2d) == len(data)

        with pytest.raises(ValueError):
            data.reshape((data.size, 2))
        with pytest.raises(ValueError):
            data.reshape(data.size, 2)

    def test_getitem_2d(self, data):
        """Integer, slice, and column indexing on 2D EAs."""
        arr2d = data.reshape(1, -1)

        result = arr2d[0]
        self.assert_extension_array_equal(result, data)

        with pytest.raises(IndexError):
            arr2d[1]
        with pytest.raises(IndexError):
            arr2d[-2]

        result = arr2d[:]
        self.assert_extension_array_equal(result, arr2d)

        result = arr2d[:, :]
        self.assert_extension_array_equal(result, arr2d)

        result = arr2d[:, 0]
        expected = data[[0]]
        self.assert_extension_array_equal(result, expected)

        # dimension-expanding getitem on 1D
        result = data[:, np.newaxis]
        self.assert_extension_array_equal(result, arr2d.T)

    def test_iter_2d(self, data):
        """Iterating a 2D EA yields 1D rows of the same type and dtype."""
        arr2d = data.reshape(1, -1)

        objs = list(iter(arr2d))
        assert len(objs) == arr2d.shape[0]

        for obj in objs:
            assert isinstance(obj, type(data))
            assert obj.dtype == data.dtype
            assert obj.ndim == 1
            assert len(obj) == arr2d.shape[1]

    def test_tolist_2d(self, data):
        """tolist() on a 2D EA returns a list of lists."""
        arr2d = data.reshape(1, -1)

        result = arr2d.tolist()
        expected = [data.tolist()]

        assert isinstance(result, list)
        assert all(isinstance(x, list) for x in result)
        assert result == expected

    def test_concat_2d(self, data):
        """_concat_same_type along axis 0 and 1; axis 2 is invalid."""
        left = type(data)._concat_same_type([data, data]).reshape(-1, 2)
        right = left.copy()

        # axis=0
        result = left._concat_same_type([left, right], axis=0)
        expected = data._concat_same_type([data] * 4).reshape(-1, 2)
        self.assert_extension_array_equal(result, expected)

        # axis=1
        result = left._concat_same_type([left, right], axis=1)
        assert result.shape == (len(data), 4)
        self.assert_extension_array_equal(result[:, :2], left)
        self.assert_extension_array_equal(result[:, 2:], right)

        # axis > 1 -> invalid
        msg = "axis 2 is out of bounds for array of dimension 2"
        with pytest.raises(ValueError, match=msg):
            left._concat_same_type([left, right], axis=2)

    @pytest.mark.parametrize("method", ["backfill", "pad"])
    def test_fillna_2d_method(self, data_missing, method):
        """fillna(method=...) on 2D matches the 1D fill repeated per column."""
        arr = data_missing.repeat(2).reshape(2, 2)
        assert arr[0].isna().all()
        assert not arr[1].isna().any()

        result = arr.fillna(method=method)

        expected = data_missing.fillna(method=method).repeat(2).reshape(2, 2)
        self.assert_extension_array_equal(result, expected)

    @pytest.mark.parametrize("method", ["mean", "median", "var", "std", "sum", "prod"])
    def test_reductions_2d_axis_none(self, data, method):
        """axis=None reductions agree with the 1D reduction (or both raise)."""
        arr2d = data.reshape(1, -1)

        err_expected = None
        err_result = None
        try:
            expected = getattr(data, method)()
        except Exception as err:
            # if the 1D reduction is invalid, the 2D reduction should be as well
            err_expected = err
            try:
                result = getattr(arr2d, method)(axis=None)
            except Exception as err2:
                err_result = err2
        else:
            result = getattr(arr2d, method)(axis=None)

        if err_result is not None or err_expected is not None:
            # when either raised, both must have raised the same exception type
            assert type(err_result) == type(err_expected)
            return

        assert is_matching_na(result, expected) or result == expected

    @pytest.mark.parametrize("method", ["mean", "median", "var", "std", "sum", "prod"])
    def test_reductions_2d_axis0(self, data, method):
        """Column-wise reductions (axis=0) on a single-row 2D EA."""
        arr2d = data.reshape(1, -1)

        kwargs = {}
        if method == "std":
            # pass ddof=0 so we get all-zero std instead of all-NA std
            kwargs["ddof"] = 0

        try:
            result = getattr(arr2d, method)(axis=0, **kwargs)
        except Exception as err:
            try:
                getattr(data, method)()
            except Exception as err2:
                assert type(err) == type(err2)
                return
            else:
                raise AssertionError("Both reductions should raise or neither")

        def get_reduction_result_dtype(dtype):
            # windows and 32bit builds will in some cases have int32/uint32
            # where other builds will have int64/uint64.
            if dtype.itemsize == 8:
                return dtype
            elif dtype.kind in "ib":
                return INT_STR_TO_DTYPE[np.dtype(int).name]
            else:
                # i.e. dtype.kind == "u"
                return INT_STR_TO_DTYPE[np.dtype(np.uint).name]

        if method in ["mean", "median", "sum", "prod"]:
            # std and var are not dtype-preserving
            expected = data
            if method in ["sum", "prod"] and data.dtype.kind in "iub":
                dtype = get_reduction_result_dtype(data.dtype)
                expected = data.astype(dtype)
                if data.dtype.kind == "b" and method in ["sum", "prod"]:
                    # We get IntegerArray instead of BooleanArray
                    pass
                else:
                    assert type(expected) == type(data), type(expected)
                assert dtype == expected.dtype

            self.assert_extension_array_equal(result, expected)
        elif method == "std":
            self.assert_extension_array_equal(result, data - data)
        # punt on method == "var"

    @pytest.mark.parametrize("method", ["mean", "median", "var", "std", "sum", "prod"])
    def test_reductions_2d_axis1(self, data, method):
        """Row-wise reductions (axis=1) reduce a single-row 2D EA to length 1."""
        arr2d = data.reshape(1, -1)

        try:
            result = getattr(arr2d, method)(axis=1)
        except Exception as err:
            try:
                getattr(data, method)()
            except Exception as err2:
                assert type(err) == type(err2)
                return
            else:
                raise AssertionError("Both reductions should raise or neither")

        # not necessarily type/dtype-preserving, so weaker assertions
        assert result.shape == (1,)
        expected_scalar = getattr(data, method)()
        res = result[0]
        assert is_matching_na(res, expected_scalar) or res == expected_scalar
class NDArrayBacked2DTests(Dim2CompatTests):
    # More specific tests for NDArrayBackedExtensionArray subclasses

    def test_copy_order(self, data):
        """copy(order=...) follows numpy's C/F/K order semantics on the
        backing ndarray, including the non-contiguous case."""
        # We should be matching numpy semantics for the "order" keyword in 'copy'
        arr2d = data.repeat(2).reshape(-1, 2)
        assert arr2d._ndarray.flags["C_CONTIGUOUS"]

        res = arr2d.copy()
        assert res._ndarray.flags["C_CONTIGUOUS"]

        res = arr2d[::2, ::2].copy()
        assert res._ndarray.flags["C_CONTIGUOUS"]

        res = arr2d.copy("F")
        assert not res._ndarray.flags["C_CONTIGUOUS"]
        assert res._ndarray.flags["F_CONTIGUOUS"]

        res = arr2d.copy("K")
        assert res._ndarray.flags["C_CONTIGUOUS"]

        # "K" keeps the source layout, so a transposed source stays F-ordered
        res = arr2d.T.copy("K")
        assert not res._ndarray.flags["C_CONTIGUOUS"]
        assert res._ndarray.flags["F_CONTIGUOUS"]

        # order not accepted by numpy
        msg = r"order must be one of 'C', 'F', 'A', or 'K' \(got 'Q'\)"
        with pytest.raises(ValueError, match=msg):
            arr2d.copy("Q")

        # neither contiguity
        arr_nc = arr2d[::2]
        assert not arr_nc._ndarray.flags["C_CONTIGUOUS"]
        assert not arr_nc._ndarray.flags["F_CONTIGUOUS"]

        assert arr_nc.copy()._ndarray.flags["C_CONTIGUOUS"]
        assert not arr_nc.copy()._ndarray.flags["F_CONTIGUOUS"]

        assert arr_nc.copy("C")._ndarray.flags["C_CONTIGUOUS"]
        assert not arr_nc.copy("C")._ndarray.flags["F_CONTIGUOUS"]

        assert not arr_nc.copy("F")._ndarray.flags["C_CONTIGUOUS"]
        assert arr_nc.copy("F")._ndarray.flags["F_CONTIGUOUS"]

        assert arr_nc.copy("K")._ndarray.flags["C_CONTIGUOUS"]
        assert not arr_nc.copy("K")._ndarray.flags["F_CONTIGUOUS"]
| test_reshape |
app.py | #!/usr/bin/env python
# coding:utf-8
import tornado.web
import tornado.ioloop
from tornado.options import define, options, parse_command_line
import signal
import threading
import time
def close_server():
    """Stop the Tornado IOLoop, waiting up to the configured grace period
    for pending callbacks/timeouts to drain before stopping.

    (Fixed: the nested ``stop_loop`` body was missing at this location in the
    source; reconstructed from the displaced text later in the file.)
    """
    from custor.logger import logger
    MAX_WAIT_SECONDS_BEFORE_SHUTDOWN = 0  # grace period; 0 stops at the first check
    deadline = time.time() + MAX_WAIT_SECONDS_BEFORE_SHUTDOWN

    def stop_loop():
        # Re-arm every second until the deadline passes or the loop has no
        # pending work, then stop the loop and kill the current thread.
        now = time.time()
        io_loop = tornado.ioloop.IOLoop.instance()
        if now < deadline and (io_loop._callbacks or io_loop._timeouts):
            io_loop.add_timeout(now + 1, stop_loop)
        else:
            io_loop.stop()
            logger.info('...Shutdown...')
            # NOTE(review): signal 9 to the current thread is a hard kill;
            # confirm this is intentional rather than sys.exit().
            signal.pthread_kill(threading.current_thread().ident, 9)

    stop_loop()
    logger.info("...close_httpserver():ready...")
# handle signal
def server_shutdown_handler(sig, frame):
    """Signal handler that schedules a graceful shutdown on the IOLoop."""
    from custor.logger import logger
    logger.warning('...Caught signal: {0}'.format(sig))
    loop = tornado.ioloop.IOLoop.instance()
    loop.add_callback(close_server)
def runserver():
    """Configure and start the Tornado application.

    Parses the command line, loads the configuration module, warms the
    cache, assembles the URL/handler table, installs SIGINT/SIGTERM
    handlers for graceful shutdown, and starts the IOLoop (blocks).
    """
    # define("port", default=8888, help="run on the given port", type=int)
    define("config", default='', help="config", type=str)
    parse_command_line()
    # maybe we can log with file
    # parse_config_file()
    # Load config lazily so parse_command_line() has run first.
    import settings.config
    config = settings.config.load_config(options.config)
    settings.config.config = config
    from app.cache import update_cache
    update_cache()
    from custor.handlers.basehandler import ErrorHandler
    from app import urls
    handlers = ()
    handlers += urls.urlpattern
    # serve each configured static path through Tornado's StaticFileHandler
    handlers += tuple((x[0], tornado.web.StaticFileHandler, x[1]) for x in config.STATIC_PATH)
    from custor import uimethods
    ui_build_methods = {
        'datetime_delta': uimethods.datetime_delta,
        'is_default_avatar': uimethods.is_default_avatar
    }
    application = tornado.web.Application(
        handlers=handlers,
        ui_methods=ui_build_methods,
        default_handler_class=ErrorHandler,
        debug=config.DEBUG,
        template_path=config.TEMPLATE_PATH,
        login_url=config.LOGIN_URL,
        cookie_secret=config.COOKIE_SECRET,
    )
    # added signal callback to interrupt app
    signal.signal(signal.SIGINT, server_shutdown_handler)
    signal.signal(signal.SIGTERM, server_shutdown_handler)
    application.listen(config.PORT)
    from custor.logger import logger
    logger.debug('Server started at port %s' % config.PORT)
    tornado.ioloop.IOLoop.instance().start()
# Entry point: start the web server when executed directly.
if __name__ == "__main__":
    runserver()
| now = time.time()
io_loop = tornado.ioloop.IOLoop.instance()
if now < deadline and (io_loop._callbacks or io_loop._timeouts):
io_loop.add_timeout(now + 1, stop_loop)
else:
io_loop.stop()
logger.info('...Shutdown...')
signal.pthread_kill(threading.current_thread().ident, 9) |
agile_test.go | package internal
import (
"testing"
"time"
"github.com/pinpt/agent/v4/sdk"
"github.com/stretchr/testify/assert"
)
// TestSprintStatusMap verifies the mapping from JIRA sprint state strings
// to the SDK's agile sprint status constants.
func TestSprintStatusMap(t *testing.T) {
	a := assert.New(t)
	a.Equal(sdk.AgileSprintStatusFuture, sprintStateMap["future"])
	a.Equal(sdk.AgileSprintStatusActive, sprintStateMap["active"])
	a.Equal(sdk.AgileSprintStatusClosed, sprintStateMap["closed"])
}
// TestMakeSprintUpdateName verifies that setting only the name yields the
// matching JIRA sprint-update payload.
func TestMakeSprintUpdateName(t *testing.T) {
	a := assert.New(t)
	var mutation sdk.AgileSprintUpdateMutation
	mutation.Set.Name = sdk.StringPointer("my sprint 1")
	payload, changed, err := makeSprintUpdate("5", &mutation)
	a.NoError(err)
	a.True(changed)
	a.Equal("{\"id\":5,\"name\":\"my sprint 1\"}", sdk.Stringify(payload))
}
// TestMakeSprintUpdateGoal verifies that setting only the goal yields the
// matching JIRA sprint-update payload.
func TestMakeSprintUpdateGoal(t *testing.T) {
	a := assert.New(t)
	var mutation sdk.AgileSprintUpdateMutation
	mutation.Set.Goal = sdk.StringPointer("get things done")
	payload, changed, err := makeSprintUpdate("5", &mutation)
	a.NoError(err)
	a.True(changed)
	a.Equal("{\"id\":5,\"goal\":\"get things done\"}", sdk.Stringify(payload))
}
func | (t *testing.T) {
assert := assert.New(t)
evt := sdk.AgileSprintUpdateMutation{}
ts, err := time.Parse("2006-01-02", "2020-09-22")
assert.NoError(err)
date := sdk.NewDateWithTime(ts)
evt.Set.StartDate = date
update, updated, err := makeSprintUpdate("5", &evt)
assert.NoError(err)
assert.True(updated)
assert.Equal("{\"id\":5,\"startDate\":\"2020-09-22T00:00:00+00:00\"}", sdk.Stringify(update))
}
// TestMakeSprintUpdateEndDate verifies that setting the end date yields the
// matching JIRA sprint-update payload.
func TestMakeSprintUpdateEndDate(t *testing.T) {
	a := assert.New(t)
	parsed, err := time.Parse("2006-01-02", "2020-09-22")
	a.NoError(err)
	var mutation sdk.AgileSprintUpdateMutation
	mutation.Set.EndDate = sdk.NewDateWithTime(parsed)
	payload, changed, err := makeSprintUpdate("5", &mutation)
	a.NoError(err)
	a.True(changed)
	a.Equal("{\"id\":5,\"endDate\":\"2020-09-22T00:00:00+00:00\"}", sdk.Stringify(payload))
}
// TestMakeSprintUpdateStatus verifies that setting the status maps to the
// JIRA "state" field in the update payload.
func TestMakeSprintUpdateStatus(t *testing.T) {
	a := assert.New(t)
	status := sdk.AgileSprintStatusClosed
	var mutation sdk.AgileSprintUpdateMutation
	mutation.Set.Status = &status
	payload, changed, err := makeSprintUpdate("5", &mutation)
	a.NoError(err)
	a.True(changed)
	a.Equal("{\"id\":5,\"state\":\"closed\"}", sdk.Stringify(payload))
}
| TestMakeSprintUpdateStartDate |
binding.gyp | {
'variables': {
'module_name%': 'node_printer',
'module_path%': './lib/'
},
'targets': [
{
"target_name": "action_after_build",
"type": "none",
"dependencies": [ "<(module_name)" ],
"copies": [
{
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
"destination": "<(module_path)"
}
]
},
{
'target_name': 'node_printer',
'sources': [
# is like "ls -1 src/*.cc", but gyp does not support direct patterns on
# sources
'<!@(["python", "tools/getSourceFiles.py", "src", "cc"])'
],
'conditions': [
# common exclusions
['OS!="linux"', {'sources/': [['exclude', '_linux\\.cc$']]}],
['OS!="mac"', {'sources/': [['exclude', '_mac\\.cc|mm?$']]}],
['OS!="win"', {
'sources/': [['exclude', '_win\\.cc$']]}, {
# else if OS==win, exclude also posix files
'sources/': [['exclude', '_posix\\.cc$']]
}],
# specific settings
['OS!="win"', {
'cflags':[
'<!(cups-config --cflags)'
],
'ldflags':[
'<!(cups-config --libs)'
#'-lcups -lgssapi_krb5 -lkrb5 -lk5crypto -lcom_err -lz -lpthread -lm -lcrypt -lz'
],
'libraries':[
'<!(cups-config --libs)'
#'-lcups -lgssapi_krb5 -lkrb5 -lk5crypto -lcom_err -lz -lpthread -lm -lcrypt -lz'
],
'link_settings': {
'libraries': [
          ]
        }
      }]
    ]
    }
  ]
}
test-node-discover.js | /**
* Copyright 2020 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ContextNode} from '../../../src/context/node';
import {deepScan, findParent} from '../../../src/context/scan';
import {domOrderComparator} from '../../../src/core/dom';
describes.realWin('ContextNode', {}, (env) => {
let win, doc;
let tree;
let clock;
let discoverWrapper;
beforeEach(() => {
win = env.win;
doc = win.document;
clock = env.sandbox.useFakeTimers();
tree = (() => {
function createSubtree(id, children, depth) {
const el = doc.createElement('div');
el.id = id;
el.textContent = id;
if (depth > 1) {
for (let i = 0; i < children; i++) {
const child = createSubtree(`${id}-${i + 1}`, children, depth - 1);
el.appendChild(child);
}
}
return el;
}
return createSubtree('T', 4, 4);
})();
discoverWrapper = wrapper(ContextNode.prototype, 'discover_');
// Customize output of the ContextNode for easy debug.
ContextNode.prototype.inspect = function () {
const contextNode = this;
return `ContextNode(${contextNode.node.id || contextNode.node.nodeName})`;
};
});
afterEach(() => {
delete ContextNode.prototype.inspect;
});
// Resolves a node of the test tree by its id; the root is addressed as 'T'.
// Throws when no element with that id exists.
// (fixed: the function name was missing — restored as `el`, the name used by
// every call site, e.g. el('T-1-1-1'))
function el(id) {
  if (id == 'T') {
    return tree;
  }
  const found = tree.querySelector(`#${id}`);
  if (!found) {
    throw new Error(`element not found ${id}`);
  }
  return found;
}
/**
 * Wraps obj[name] with a sandbox stub that still invokes the original
 * implementation, and returns a handle exposing the spy plus a waitFor()
 * that resolves the next time the wrapped method runs on a given instance.
 * NOTE(review): the stub is installed on ContextNode.prototype regardless of
 * the obj argument — fine for the current single usage; confirm if reused.
 *
 * @param {Object} obj
 * @param {string} name
 */
function wrapper(obj, name) {
  const original = obj[name];
  const stub = env.sandbox.stub(ContextNode.prototype, name);
  const wrapperName = `__wrapper_${name}`;
  stub.callsFake(function (...args) {
    const obj = this;
    // Resolvers registered via waitFor() for this instance, if any.
    const resolvers = obj[wrapperName] ?? (obj[wrapperName] = []);
    const result = original.apply(this, args);
    // Notify only the waiters registered before this call; new waiters
    // registered while resolving will target the next invocation.
    const current = resolvers.slice(0);
    resolvers.length = 0;
    current.forEach((resolver) => resolver(result));
    return result;
  });
  return {
    spy: stub,
    waitFor: (obj) => {
      const resolvers = obj[wrapperName] ?? (obj[wrapperName] = []);
      return new Promise((resolve) => {
        resolvers.push(resolve);
      });
    },
  };
}
// Normalizes DOM nodes to ContextNodes, registers a waiter on each node's
// discover_ call, advances the fake clock to trigger the scheduled
// discovery, and resolves once every node has been discovered.
function waitForDiscover(...nodesOrContextNodes) {
  const contextNodes = [];
  for (const arg of nodesOrContextNodes) {
    contextNodes.push(arg.nodeType ? ContextNode.get(arg) : arg);
  }
  const pending = [];
  for (const contextNode of contextNodes) {
    pending.push(discoverWrapper.waitFor(contextNode));
  }
  clock.tick(1);
  return Promise.all(pending);
}
// Forces a fresh discovery pass on each given node (DOM node or
// ContextNode) and waits until all of them have been re-discovered.
function rediscover(...nodesOrContextNodes) {
  const contextNodes = [];
  for (const arg of nodesOrContextNodes) {
    contextNodes.push(arg.nodeType ? ContextNode.get(arg) : arg);
  }
  for (const contextNode of contextNodes) {
    contextNode.discover();
  }
  return waitForDiscover(...contextNodes);
}
/**
 * Asserts selected properties of a ContextNode (or of the ContextNode for a
 * DOM node) against a spec object. Only the spec keys that are defined are
 * checked: isRoot, root, discoverable, parent, children.
 */
function expectContext(nodeOrContext, spec) {
  const contextNode = nodeOrContext.nodeType
    ? ContextNode.get(nodeOrContext)
    : nodeOrContext;
  if (spec.isRoot !== undefined) {
    expect(contextNode.isRoot, 'isRoot').to.equal(spec.isRoot);
  }
  if (spec.root !== undefined) {
    // spec.root is a DOM node (or null); compare against root's node
    const {root} = contextNode;
    expect((root && root.node) ?? null, 'root').to.equal(spec.root);
  }
  if (spec.discoverable !== undefined) {
    expect(contextNode.isDiscoverable(), 'discoverable').to.equal(
      spec.discoverable
    );
  }
  if (spec.parent !== undefined) {
    // spec.parent may be a ContextNode, a DOM node, or null
    const {parent} = contextNode;
    const parentNode = (parent && parent.node) ?? null;
    const specNode =
      ((spec.parent && spec.parent.node) || spec.parent) ?? null;
    expect(parentNode, 'parent').to.equal(specNode);
  }
  if (spec.children !== undefined) {
    // compare DOM nodes in document order so ordering is deterministic
    const children = (contextNode.children || []).map((cn) => cn.node || cn);
    children.sort(domOrderComparator);
    const specChildren = spec.children.slice(0);
    specChildren.sort(domOrderComparator);
    expect(children, 'children').to.deep.equal(specChildren);
  }
}
describe('ContextNode.get', () => {
it('should create an element node', () => {
const el = doc.createElement('div');
const cn = ContextNode.get(el);
expect(cn.node).to.equal(el);
// Parent always starts as null.
expectContext(cn, {
parent: null,
isRoot: false,
root: null,
children: [],
discoverable: true,
});
});
it('should create a fragment node', () => {
const frag = doc.createDocumentFragment();
const cn = ContextNode.get(frag);
expect(cn.node).to.equal(frag);
// Parent always starts as null.
expectContext(cn, {
parent: null,
isRoot: false,
root: null,
children: [],
discoverable: true,
});
});
it('should create a document node', () => {
const cn = ContextNode.get(doc);
expect(cn.node).to.equal(doc);
// Parent always starts as null.
expectContext(cn, {
parent: null,
isRoot: true,
root: doc,
children: [],
discoverable: false,
});
});
it('should create a node only once', () => {
const el = doc.createElement('div');
const frag = doc.createDocumentFragment();
const cn1 = ContextNode.get(el);
const cn2 = ContextNode.get(frag);
const cn3 = ContextNode.get(doc);
expect(cn1).to.equal(ContextNode.get(el));
expect(cn2).to.equal(ContextNode.get(frag));
expect(cn3).to.equal(ContextNode.get(doc));
});
});
describe('ContextNode.closest', () => {
let element;
let grandparent;
beforeEach(() => {
element = el('T-1-1-1');
grandparent = el('T-1');
});
it('should find itself', () => {
const elementContext = ContextNode.get(element);
expect(ContextNode.closest(element)).to.equal(elementContext);
});
it('should skip itself', () => {
ContextNode.get(element);
const grandparentContext = ContextNode.get(grandparent);
expect(ContextNode.closest(element, false)).to.equal(grandparentContext);
expect(ContextNode.closest(grandparent, false)).to.be.null;
});
it('should go up DOM tree', () => {
const grandparentContext = ContextNode.get(grandparent);
expect(ContextNode.closest(element)).to.equal(grandparentContext);
});
it('should auto-create root when connected', () => {
doc.body.appendChild(tree);
expect(ContextNode.closest(element)).to.equal(ContextNode.get(doc));
expectContext(ContextNode.get(doc), {isRoot: true});
});
it('should auto-create root on a fragment', () => {
const frag = doc.createDocumentFragment();
frag.appendChild(tree);
expect(ContextNode.closest(element)).to.equal(ContextNode.get(frag));
expectContext(ContextNode.get(frag), {
isRoot: false,
root: null,
parent: null,
});
});
it('should auto-create a custom AMP element', () => {
const ampElement = doc.createElement('amp-element');
ampElement.appendChild(tree);
expect(ContextNode.closest(element)).to.equal(
ContextNode.get(ampElement)
);
expectContext(ContextNode.get(ampElement), {
isRoot: false,
root: null,
parent: null,
});
});
});
describe('discover', () => {
let sibling1;
let sibling2;
let cousin1;
let parent;
let grandparent;
beforeEach(() => {
sibling1 = el('T-1-1-1');
sibling2 = el('T-1-1-2');
cousin1 = el('T-1-2-1');
parent = el('T-1-1');
grandparent = el('T-1');
});
it('should be created in an undiscovered mode', () => {
expectContext(sibling1, {parent: null, discoverable: true});
expectContext(sibling2, {parent: null, discoverable: true});
expectContext(cousin1, {parent: null, discoverable: true});
});
describe('disconnected tree', () => {
beforeEach(async () => {
await waitForDiscover(sibling1, sibling2, cousin1);
});
it('should not auto-discover an orphan', async () => {
expectContext(sibling1, {
parent: null,
children: [],
root: null,
isRoot: false,
discoverable: true,
});
expectContext(sibling2, {
parent: null,
children: [],
root: null,
isRoot: false,
discoverable: true,
});
expectContext(cousin1, {
parent: null,
children: [],
root: null,
isRoot: false,
discoverable: true,
});
});
it('should auto-discover the grandparent', async () => {
await waitForDiscover(grandparent);
await rediscover(sibling1, sibling2, cousin1);
expectContext(sibling1, {
isRoot: false,
root: null,
parent: grandparent,
children: [],
discoverable: true,
});
expectContext(sibling2, {
isRoot: false,
root: null,
parent: grandparent,
children: [],
discoverable: true,
});
expectContext(cousin1, {
isRoot: false,
root: null,
parent: grandparent,
children: [],
discoverable: true,
});
expectContext(grandparent, {
isRoot: false,
root: null,
parent: null,
children: [sibling1, sibling2, cousin1],
discoverable: true,
});
const element3 = el('T-1-1-3');
await waitForDiscover(element3);
expectContext(element3, {
isRoot: false,
root: null,
parent: grandparent,
children: [],
discoverable: true,
});
expectContext(grandparent, {
isRoot: false,
root: null,
parent: null,
children: [sibling1, sibling2, cousin1, element3],
discoverable: true,
});
});
it('should insert an intermediary parent', async () => {
await waitForDiscover(grandparent);
await rediscover(sibling1, sibling2, cousin1);
expectContext(sibling1, {parent: grandparent});
expectContext(sibling2, {parent: grandparent});
expectContext(cousin1, {parent: grandparent});
await waitForDiscover(parent);
expectContext(sibling1, {
parent,
children: [],
isRoot: false,
root: null,
discoverable: true,
});
expectContext(sibling2, {
parent,
children: [],
isRoot: false,
root: null,
discoverable: true,
});
expectContext(parent, {
parent: grandparent,
children: [sibling1, sibling2],
isRoot: false,
root: null,
discoverable: true,
});
expectContext(grandparent, {
parent: null,
children: [parent, cousin1],
isRoot: false,
root: null,
discoverable: true,
});
});
it('should assign/unassign a slot', async () => {
await waitForDiscover(grandparent, parent);
await rediscover(sibling1, sibling2, cousin1);
expectContext(sibling1, {parent});
expectContext(sibling2, {parent});
expectContext(cousin1, {grandparent});
expectContext(grandparent, {children: [parent, cousin1]});
expectContext(parent, {children: [sibling1, sibling2]});
// Slot cousin1 under parent.
ContextNode.assignSlot(cousin1, el('T-1-1-3'));
await rediscover(cousin1);
expectContext(cousin1, {parent});
expectContext(grandparent, {children: [parent]});
expectContext(parent, {children: [sibling1, sibling2, cousin1]});
// Unslot cousin1.
ContextNode.unassignSlot(cousin1, el('T-1-1-3'));
await rediscover(cousin1);
expectContext(cousin1, {parent: grandparent});
expectContext(grandparent, {children: [parent, cousin1]});
expectContext(parent, {children: [sibling1, sibling2]});
});
it('should assign/unassign a slot in Shadow DOM', async () => {
if (Element.prototype.attachShadow) {
return;
}
await waitForDiscover(grandparent, parent);
const grandchild = doc.createElement('div');
sibling1.appendChild(grandchild);
await waitForDiscover(grandchild);
const shadowRoot = sibling1.attachShadow({mode: 'open'});
await waitForDiscover(shadowRoot);
expectContext(grandchild, {parent: sibling1});
expectContext(sibling1, {children: [grandchild, shadowRoot]});
expectContext(shadowRoot, {children: [], parent: sibling1});
// Slot.
const slot = doc.createElement('slot');
shadowRoot.appendChild(slot);
await rediscover(grandchild);
expectContext(grandchild, {parent: shadowRoot});
expectContext(sibling1, {children: [shadowRoot]});
expectContext(shadowRoot, {children: [grandchild], parent: sibling1});
// Unslot.
shadowRoot.removeChild(slot);
await rediscover(grandchild);
expectContext(grandchild, {parent: sibling1});
expectContext(sibling1, {children: [grandchild, shadowRoot]});
expectContext(shadowRoot, {children: [], parent: sibling1});
});
it('should override the root', async () => {
await waitForDiscover(grandparent, parent);
await rediscover(sibling1, sibling2, cousin1);
ContextNode.get(grandparent).setIsRoot(true);
expectContext(grandparent, {
parent: null,
children: [parent, cousin1],
isRoot: true,
root: grandparent,
discoverable: false,
});
expectContext(parent, {
parent: grandparent,
children: [sibling1, sibling2],
isRoot: false,
root: grandparent,
discoverable: true,
});
expectContext(sibling1, {
parent,
children: [],
isRoot: false,
root: grandparent,
discoverable: true,
});
expectContext(sibling2, {
parent,
children: [],
isRoot: false,
root: grandparent,
discoverable: true,
});
expectContext(cousin1, {
parent: grandparent,
children: [],
isRoot: false,
root: grandparent,
discoverable: true,
});
});
it('should override the parent', async () => {
await waitForDiscover(grandparent);
await rediscover(sibling1, sibling2, cousin1);
expectContext(sibling1, {parent: grandparent});
expectContext(sibling2, {parent: grandparent});
expectContext(cousin1, {parent: grandparent});
ContextNode.get(sibling2).setParent(sibling1);
expectContext(sibling2, {
parent: sibling1,
children: [],
discoverable: false,
});
expectContext(sibling1, {
parent: grandparent,
children: [sibling2],
discoverable: true,
});
expectContext(grandparent, {
children: [sibling1, cousin1],
discoverable: true,
});
});
});
describe('tree connected later', () => {
beforeEach(async () => {
await waitForDiscover(sibling1, sibling2, cousin1);
});
function connectTree() {
doc.body.appendChild(tree);
return ContextNode.get(doc);
}
it('should configure the root', async () => {
expectContext(connectTree(), {
discoverable: false,
isRoot: true,
root: doc,
parent: null,
children: [],
});
});
it('should auto-discover an orphan', async () => {
expectContext(sibling1, {parent: null, root: null});
expectContext(sibling2, {parent: null, root: null});
expectContext(cousin1, {parent: null, root: null});
connectTree();
await rediscover(sibling1, sibling2, cousin1);
expectContext(sibling1, {
parent: doc,
root: doc,
children: [],
isRoot: false,
discoverable: true,
});
expectContext(sibling2, {
parent: doc,
root: doc,
children: [],
isRoot: false,
discoverable: true,
});
expectContext(cousin1, {
parent: doc,
root: doc,
children: [],
isRoot: false,
discoverable: true,
});
expectContext(doc, {
isRoot: true,
root: doc,
parent: null,
children: [sibling1, sibling2, cousin1],
});
});
it('should auto-discover the grandparent', async () => {
await waitForDiscover(grandparent);
await rediscover(sibling1, sibling2, cousin1);
connectTree();
await rediscover(grandparent);
expectContext(doc, {
isRoot: true,
root: doc,
parent: null,
children: [grandparent],
discoverable: false,
});
expectContext(grandparent, {
isRoot: false,
root: doc,
parent: doc,
children: [sibling1, sibling2, cousin1],
discoverable: true,
});
expectContext(sibling1, {
isRoot: false,
root: doc,
parent: grandparent,
children: [],
discoverable: true,
});
expectContext(sibling2, {
isRoot: false,
root: doc,
parent: grandparent,
children: [],
discoverable: true,
});
expectContext(cousin1, {
isRoot: false,
root: doc,
parent: grandparent,
children: [],
discoverable: true,
});
});
it('should insert an intermediary parent', async () => {
await waitForDiscover(grandparent, parent);
await rediscover(sibling1, sibling2, cousin1);
connectTree();
await rediscover(grandparent);
expectContext(doc, {
parent: null,
children: [grandparent],
isRoot: true,
root: doc,
discoverable: false,
});
expectContext(grandparent, {
parent: doc,
children: [parent, cousin1],
isRoot: false,
root: doc,
discoverable: true,
});
expectContext(parent, {
parent: grandparent,
children: [sibling1, sibling2],
isRoot: false,
root: doc,
discoverable: true,
});
expectContext(sibling1, {
parent,
children: [],
isRoot: false,
root: doc,
discoverable: true,
});
expectContext(sibling2, {
parent,
children: [],
isRoot: false,
root: doc,
discoverable: true,
});
expectContext(cousin1, {
parent: grandparent,
children: [],
isRoot: false,
root: doc,
discoverable: true,
});
});
});
describe('tree connected earlier', () => {
beforeEach(async () => {
doc.body.appendChild(tree);
await waitForDiscover(sibling1, sibling2, cousin1);
});
it('should configure root', () => {
expectContext(doc, {
root: doc,
isRoot: true,
parent: null,
children: [sibling1, sibling2, cousin1],
discoverable: false,
});
});
it('should auto-discover an orphan', async () => {
expectContext(sibling1, {
parent: doc,
root: doc,
isRoot: false,
children: [],
discoverable: true,
});
expectContext(sibling2, {
parent: doc,
root: doc,
isRoot: false,
children: [],
discoverable: true,
});
expectContext(cousin1, {
parent: doc,
root: doc,
isRoot: false,
children: [],
discoverable: true,
});
});
it('should auto-discover the grandparent', async () => {
await waitForDiscover(grandparent);
expectContext(doc, {
isRoot: true,
root: doc,
parent: null,
children: [grandparent],
discoverable: false,
});
expectContext(grandparent, {
isRoot: false,
root: doc,
parent: doc,
children: [sibling1, sibling2, cousin1],
discoverable: true,
});
expectContext(sibling1, {
isRoot: false,
root: doc,
parent: grandparent,
children: [],
discoverable: true,
});
expectContext(sibling2, {
isRoot: false,
root: doc,
parent: grandparent,
children: [],
discoverable: true,
});
expectContext(cousin1, {
isRoot: false,
root: doc,
parent: grandparent,
children: [],
discoverable: true,
});
});
it('should insert an intermediary parent', async () => {
await waitForDiscover(grandparent, parent);
expectContext(doc, {
isRoot: true,
root: doc,
parent: null,
children: [grandparent],
discoverable: false,
});
expectContext(grandparent, {
parent: doc,
children: [parent, cousin1],
isRoot: false,
root: doc,
discoverable: true,
});
expectContext(parent, {
parent: grandparent,
children: [sibling1, sibling2],
isRoot: false,
root: doc,
discoverable: true,
});
expectContext(sibling1, {
parent,
children: [],
isRoot: false,
root: doc,
discoverable: true,
});
expectContext(sibling2, {
parent,
children: [],
isRoot: false,
root: doc,
discoverable: true,
});
expectContext(cousin1, {
parent: grandparent,
children: [],
isRoot: false,
root: doc,
discoverable: true,
});
});
it('should move to new root', async () => {
await waitForDiscover(grandparent, parent);
const doc2 = document.implementation.createHTMLDocument('');
ContextNode.get(doc2);
doc2.body.appendChild(tree);
await rediscover(grandparent);
expectContext(doc, {
parent: null,
children: [],
isRoot: true,
root: doc,
discoverable: false,
});
expectContext(doc2, {
parent: null,
children: [grandparent],
isRoot: true,
root: doc2,
discoverable: false,
});
expectContext(grandparent, {
parent: doc2,
children: [parent, cousin1],
root: doc2,
});
expectContext(parent, {
parent: grandparent,
children: [sibling1, sibling2],
root: doc2,
});
expectContext(sibling1, {
parent,
children: [],
root: doc2,
});
expectContext(sibling2, {
parent,
children: [],
root: doc2,
});
expectContext(cousin1, {
parent: grandparent,
children: [],
root: doc2,
});
});
it('should override the root', async () => {
await waitForDiscover(grandparent, parent);
ContextNode.get(grandparent).setIsRoot(true);
expectContext(grandparent, {
parent: doc,
children: [parent, cousin1],
isRoot: true,
root: grandparent,
discoverable: false,
});
expectContext(parent, {
parent: grandparent,
children: [sibling1, sibling2],
isRoot: false,
root: grandparent,
discoverable: true,
});
expectContext(sibling1, {
parent,
children: [],
isRoot: false,
root: grandparent,
discoverable: true,
});
expectContext(sibling2, {
parent,
children: [],
isRoot: false,
root: grandparent,
discoverable: true,
});
expectContext(cousin1, {
parent: grandparent,
children: [],
isRoot: false,
root: grandparent,
discoverable: true,
});
});
});
});
describe('discover shadow DOM', () => {
let sibling1;
let sibling2;
let parent;
beforeEach(() => {
sibling1 = el('T-1-1-1');
sibling2 = el('T-1-1-2');
parent = el('T-1-1');
});
it('should rediscover slots when shadow root is added', async () => {
doc.body.appendChild(tree);
await waitForDiscover(parent, sibling1, sibling2);
const shadowRoot = parent.attachShadow({mode: 'open'});
ContextNode.get(shadowRoot).setParent(parent);
const slot1 = doc.createElement('slot');
slot1.name = 'slot1';
sibling1.setAttribute('slot', 'slot1');
shadowRoot.appendChild(slot1);
await waitForDiscover(shadowRoot, sibling1);
// sibling1's parent is shadow root because it matches the slot.
expectContext(sibling1, {parent: shadowRoot});
// sibling2's parent stays with "parent" because it's unslotted.
expectContext(sibling2, {parent});
});
describe('shadow DOM exists from the start', () => {
let shadowRoot;
let slot1, slot1Parent;
beforeEach(async () => {
shadowRoot = parent.attachShadow({mode: 'open'});
ContextNode.get(shadowRoot).setParent(parent);
slot1Parent = doc.createElement('div');
slot1Parent.id = 'slot1-parent';
shadowRoot.appendChild(slot1Parent);
slot1 = doc.createElement('slot');
slot1.id = slot1.name = 'slot1';
sibling1.setAttribute('slot', 'slot1');
slot1Parent.appendChild(slot1);
doc.body.appendChild(tree);
await waitForDiscover(parent, shadowRoot, sibling1, sibling2);
});
function awaitSlotChange() {
return new Promise((resolve) => {
shadowRoot.addEventListener('slotchange', resolve);
});
}
it('should assign shadow root as a parent via slot', () => {
// sibling1's parent is shadow root because it matches the slot.
expectContext(sibling1, {parent: shadowRoot});
// sibling2's parent stays with "parent" because it's unslotted.
expectContext(sibling2, {parent});
});
it('should reassign when slots change', async () => {
sibling1.removeAttribute('slot');
sibling2.setAttribute('slot', 'slot1');
await awaitSlotChange();
clock.runAll();
// sibling1's parent stays with "parent" because it's unslotted.
expectContext(sibling1, {parent});
// sibling2's parent is shadow root because it matches the slot.
expectContext(sibling2, {parent: shadowRoot});
});
it('should reassign when slots are removed', async () => {
slot1Parent.removeChild(slot1);
await waitForDiscover(sibling1);
// Returns to parent.
expectContext(sibling1, {parent});
});
it('should reassign to slot if it becomes a context node', async () => {
const sibling1Wait = waitForDiscover(sibling1);
await rediscover(slot1);
await sibling1Wait;
// slot belongs to the shadow root.
expectContext(slot1, {parent: shadowRoot});
// sibling1's parent is slot now.
expectContext(sibling1, {parent: slot1});
// Not changed: unslotted.
expectContext(sibling2, {parent});
});
it('should reassign to slot parent becomes a context node', async () => {
const sibling1Wait = waitForDiscover(sibling1);
await rediscover(slot1Parent);
await sibling1Wait;
// slot belongs to the shadow root.
expectContext(slot1Parent, {parent: shadowRoot});
// sibling1's parent is slot now.
expectContext(sibling1, {parent: slot1Parent});
// Not changed: unslotted.
expectContext(sibling2, {parent});
});
});
});
describe('discover groups', () => {
let sibling1;
let sibling2;
let cousin1;
let parent;
let grandparent;
beforeEach(async () => {
sibling1 = el('T-1-1-1');
sibling2 = el('T-1-1-2');
cousin1 = el('T-1-2-1');
parent = el('T-1-1');
grandparent = el('T-1');
await waitForDiscover(grandparent, parent, sibling1, sibling2, cousin1);
});
it('should rediscover children when a new group is added', async () => {
const group1 = ContextNode.get(parent).addGroup(
'group1',
(node) => node == sibling1,
0
);
await waitForDiscover(group1, sibling1);
clock.runAll();
expect(group1.node).to.equal(parent);
expectContext(group1, {parent, children: [sibling1]});
// sibling1 is reassigned to the group.
expectContext(sibling1, {parent: group1});
// sibling2 stays stays unchanged.
expectContext(sibling2, {parent});
});
it('should discover a new child', async () => {
const sibling3 = el('T-1-1-3');
const group1 = ContextNode.get(parent).addGroup(
'group1',
(node) => node == sibling3,
0
);
await waitForDiscover(group1);
// Discover the new node.
await rediscover(sibling3);
expectContext(sibling3, {parent: group1});
});
it('should handle weight', async () => {
const group1 = ContextNode.get(parent).addGroup(
'group1',
(node) => node == sibling1,
0
);
await waitForDiscover(group1, sibling1);
expectContext(sibling1, {parent: group1});
// A lower weight.
const group2 = ContextNode.get(parent).addGroup(
'group1',
(node) => node == sibling1,
-1
);
await waitForDiscover(group2, sibling1);
expectContext(sibling1, {parent: group1});
// A higher weight.
const group3 = ContextNode.get(parent).addGroup(
'group1',
(node) => node == sibling1,
1
);
await waitForDiscover(group3, sibling1);
expectContext(sibling1, {parent: group3});
});
});
describe('scanners', () => {
const EXCLUDE_SELF = false;
let parent;
let grandparent;
let sibling1;
let sibling2;
let cousin1;
beforeEach(async () => {
grandparent = ContextNode.get(el('T-1'));
parent = ContextNode.get(el('T-1-1'));
sibling1 = ContextNode.get(el('T-1-1-1'));
sibling2 = ContextNode.get(el('T-1-1-2'));
cousin1 = ContextNode.get(el('T-1-2-1'));
await waitForDiscover(grandparent, parent, sibling1, sibling2, cousin1);
});
it('should find closest', () => {
const any = () => true;
expect(findParent(sibling1, any)).to.equal(sibling1);
expect(findParent(sibling1, any, null, EXCLUDE_SELF)).to.equal(parent);
const eq = (cn, arg) => cn === arg;
expect(findParent(sibling1, eq, sibling1)).to.equal(sibling1);
expect(findParent(sibling1, eq, sibling1, EXCLUDE_SELF)).to.be.null;
expect(findParent(sibling1, eq, parent)).to.equal(parent);
});
it('should scan the subtree completely', () => {
const scanned = [];
const scan = (cn, exclude) => {
scanned.push(cn);
if (exclude && exclude.indexOf(cn) != -1) {
return false;
}
return true;
};
// Scan all.
scanned.length = 0;
deepScan(grandparent, scan);
expect(scanned).to.deep.equal([
grandparent,
parent,
sibling1,
sibling2,
cousin1,
]);
// Scan subtree.
scanned.length = 0;
deepScan(grandparent, scan, null, true, EXCLUDE_SELF);
expect(scanned).to.deep.equal([parent, sibling1, sibling2, cousin1]);
// Scan some.
scanned.length = 0;
deepScan(grandparent, scan, [parent]);
expect(scanned).to.deep.equal([grandparent, parent, cousin1]);
});
});
});
| el |
process.rs | //! Processes, threads and events.
//!
//! Invoke [`wait`] to obtain an [`Event`].
//!
//! [`wait`]: fn.wait.html
//! [`Event`]: struct.Event.html
use super::arch::AMD64RegisterSet;
use super::signal::Signal;
use bitflags::bitflags;
use errno::{errno, set_errno, Errno};
use failure::Fail;
use log::*;
use std::mem::MaybeUninit;
use std::path::Path;
pub type RegisterSet = AMD64RegisterSet;
#[repr(C)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct IoVec {
base: u64,
len: usize,
}
impl IoVec {
unsafe fn from_buffer(buf: &[u8]) -> Self {
Self {
base: buf.as_ptr() as _,
len: buf.len(),
}
}
fn new(base: u64, len: usize) -> Self {
Self { base, len }
}
}
/// The terminate reasons for a process vanished from the OS or in limbo.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum TerminateReason {
Exit { status: i32 },
Signal { signal: Signal, has_coredump: bool },
}
impl TerminateReason {
/// Returns `Some(result)` if the status denotes terminated process. Returns
/// `None` if not this case.
fn decode(status: i32) -> Option<Self> {
unsafe {
if libc::WIFEXITED(status) {
// Bits:
// - 0-6: 0
// - 7
// - 8-15: exit code
//
let status = libc::WEXITSTATUS(status);
Some(TerminateReason::Exit { status })
} else if libc::WIFSIGNALED(status) {
// Bits:
// - 0-6: terminal signal in [1, 0x7f)
// - 7: have coredump?
Some(TerminateReason::Signal {
signal: Signal::coerce_from(libc::WTERMSIG(status)),
has_coredump: libc::WCOREDUMP(status),
})
} else {
None
}
}
}
}
/// The child of a fork-ish operation, i.e. a new thread or process.
///
/// It is reported when [`SuspendReason::Fork`] happens.
///
/// [`SuspendReason::Fork`]: enum.SuspendReason.html
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ForkChild {
Thread(Thread),
Process(Process),
}
impl ForkChild {
fn from_tid(parent: Thread, child_id: u32) -> Self {
let child_thread = child_id.into();
if parent.same_process(child_thread) {
ForkChild::Thread(child_thread)
} else {
ForkChild::Process(child_id.into())
}
}
}
/// Kinds for fork-ish syscalls.
///
/// A process can be created by `vfork`/`fork`/`clone`, and a thread can be
/// created by `clone`.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ForkKind {
Fork,
Clone,
VFork,
VForkDone,
}
/// The process has been suspended, usually because of an event.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum SuspendReason {
Signal(Signal),
Terminate(TerminateReason),
Syscall,
Exec,
Fork { child: ForkChild, kind: ForkKind },
SecComp,
Unknown,
}
impl SuspendReason {
/// Returns `Some(result)` if the status denotes suspended process. Returns
/// `None` if not this case.
fn decode(thread: Thread, status: i32) -> Option<Self> {
if !unsafe { libc::WIFSTOPPED(status) } {
return None;
}
// Bits 0-7 = 0x7f
let signal = unsafe { libc::WSTOPSIG(status) }; // Bits 8-15
let extension = status >> 16; // Bits 16-31
let reason = if signal == libc::SIGTRAP | 0x80 {
// Situation: syscall (PTRACE_O_TRACESYSGOOD)
// Bits:
// - 8-14: SIGTRAP
// - 15: 1 (syscall)
// - 16-31: 0
SuspendReason::Syscall
} else if extension == 0 {
// Situation: signal
// Bits:
// - 8-14: signal number
// - 15
// - 16-31: 0
SuspendReason::Signal(Signal::coerce_from(signal))
} else {
// Situation: ptrace event
// Bits:
// - 8-14: SIGTRAP
// - 15: 0
// - 16-31: event number
match extension {
libc::PTRACE_EVENT_EXIT => Self::decode_exit(thread),
libc::PTRACE_EVENT_EXEC => SuspendReason::Exec,
libc::PTRACE_EVENT_SECCOMP => SuspendReason::SecComp,
libc::PTRACE_EVENT_FORK
| libc::PTRACE_EVENT_VFORK
| libc::PTRACE_EVENT_VFORK_DONE
| libc::PTRACE_EVENT_CLONE => Self::decode_fork(thread, extension),
_ => panic!(
"unexpected ptrace event for {}: signal = {}, extension = {}",
thread.tid(),
signal,
extension
),
}
};
Some(reason)
}
fn decode_exit(thread: Thread) -> Self {
match thread.trace_get_event_message() {
Ok(exit_code) => TerminateReason::decode(exit_code as _)
.map(SuspendReason::Terminate)
.unwrap_or_else(|| {
panic!(
"non-terminal code on PTRACE_EVENT_EXIT, code = {}",
exit_code
)
}),
Err(e) => {
debug!(target: "trace",
"cannot to get event for PTRACE_EVENT_EXIT: {}", e);
SuspendReason::Unknown
}
}
}
fn decode_fork(thread: Thread, extension: i32) -> Self {
let kind = match extension {
libc::PTRACE_EVENT_FORK => ForkKind::Fork,
libc::PTRACE_EVENT_VFORK => ForkKind::VFork,
libc::PTRACE_EVENT_VFORK_DONE => ForkKind::VForkDone,
libc::PTRACE_EVENT_CLONE => ForkKind::Clone,
_ => unreachable!(),
};
match thread.trace_get_event_message() {
Ok(child_tid) => SuspendReason::Fork {
child: ForkChild::from_tid(thread, child_tid as _),
kind,
},
Err(e) => {
debug!(target: "trace",
"cannot to get event for {:?}: {}", kind, e);
SuspendReason::Unknown
}
}
}
}
/// Thread event details.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum EventKind {
Terminated(TerminateReason),
Suspended(SuspendReason),
Resumed,
}
impl EventKind {
/// Decode the waitpid event. It may fail only when `Event::Terminated` is occurred.
fn decode(thread: Thread, status: i32) -> EventKind {
if unsafe { libc::WIFCONTINUED(status) } {
// Bits: 0-15 = 0xffff
EventKind::Resumed
} else if let Some(reason) = TerminateReason::decode(status) {
EventKind::Terminated(reason)
} else if let Some(reason) = SuspendReason::decode(thread, status) {
EventKind::Suspended(reason)
} else {
panic!("unexpected wait status");
}
}
/// Returns whether the event is SIGTRAP.
pub fn is_trap(&self) -> bool {
if let EventKind::Suspended(SuspendReason::Signal(Signal::SIGTRAP)) = self {
true
} else {
false
}
}
}
/// Thread events.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Event {
/// The thread where the event occurred.
pub thread: Thread,
/// Details of the event.
pub kind: EventKind,
}
impl Event {
/// Automatically resume suspended thread.
///
/// # Behavior
/// * SIGSTOP / SIGTRAP: resume with no signal
/// * Other signals: resume with the original signal
/// * Fork: resume the new process or thread
pub fn resume(&self) -> Result<(), TraceError> {
if let EventKind::Suspended(ref reason) = self.kind {
match reason {
&SuspendReason::Signal(signal) => {
if signal == Signal::SIGSTOP || signal == Signal::SIGTRAP {
// SIGSTOP and SIGTRAP is used for debugging. It's unlikely that
// other processes will send this signal.
self.thread.trace_resume(0)
} else {
self.thread.trace_resume(signal as _)
}
}
SuspendReason::Fork { child, kind } => {
let _ = kind;
match child {
ForkChild::Thread(thread) => thread.trace_resume(0)?,
ForkChild::Process(process) => {
// The only thread of a newly created process is its main thread.
process.main_thread().trace_resume(0)?
}
}
self.thread.trace_resume(0)
}
_ => self.thread.trace_resume(0),
}
} else {
Ok(())
}
}
fn new(tid: u32, status: i32) -> Self {
let thread = Thread::from(tid);
let kind = EventKind::decode(thread, status);
Self { thread, kind }
}
}
/// Error returned by `wait`.
#[derive(Debug, Fail)]
pub enum WaitError {
#[fail(display = "no child process")]
NoChild,
#[fail(display = "no event available")]
NoEvent,
}
/// Checks for the next process event.
///
/// Blocking mode (`WaitOptions::NONBLOCK` is NOT set):
/// * Wait and returns `Ok(Event)` denoting the event;
/// * Immediately returns `Err(WaitError::NoChild)` if no child exists.
///
/// Nonblocking mode (`WaitOptions::NONBLOCK` is set):
/// * Returns `Ok(event)` if an event is available;
/// * Returns `Err(NoChild)` if no child exists or the child is filtered out;
/// * Returns `Err(NoEvent)` if no event is immediately available.
pub fn wait(target: WaitTarget, options: WaitOptions) -> Result<Event, WaitError> {
let mut status = 0;
loop {
match unsafe { libc::waitpid(target.0, &mut status as _, options.bits()) } {
// Handle error.
-1 => {
let err = errno();
match err.0 {
// No child process. All pending events are processed.
libc::ECHILD => return Err(WaitError::NoChild),
// Interrupted by signal: retry.
libc::EINTR => continue,
// Other error: shouldn't happen.
_ => panic!("unknown waitpid error: {}", err),
}
}
// Have child process, but no state change. Implies that all
// pending events are processed.
0 => return Err(WaitError::NoEvent),
pid => {
return Ok(Event::new(pid as _, status));
}
}
}
}
/// Helper for specifying target(s) for `wait`.
pub struct WaitTarget(i32);
impl WaitTarget {
/// Wait for any child process.
pub fn any() -> Self {
Self(-1)
}
/// Wait for any child process whose process group ID is equal to that of
/// the calling process.
pub fn same_pgid() -> Self {
Self(0)
}
/// Wait for any child process whose process group ID is equal to `pgid`.
pub fn by_pgid(pgid: u32) -> Self {
Self(-(pgid as i32))
}
/// Wait for the child whose process ID is equal to `pid`.
pub fn by_pid(pid: u32) -> Self {
Self(pid as _)
}
}
bitflags! {
/// Options for invoking `wait`.
pub struct WaitOptions : i32 {
/// Only wait child of current thread. If not specified, waits for child
/// of all threads inside current process.
const ONLY_CURRENT_THREAD_CHILD = libc::__WNOTHREAD;
/// Do not wait for the next event. Return immediately even if no event
/// is available.
const NONBLOCK = libc::WNOHANG;
/// Wait for both process and thread events. If not specified, only wait
/// for process events.
const PROCESS_AND_THREADS = libc::__WALL;
/// Wait for thread events only.
const THREAD_ONLY = libc::__WCLONE;
}
}
impl WaitOptions {
/// Recommended options if `ptrace` is enabled.
pub fn default_trace() -> WaitOptions {
WaitOptions::PROCESS_AND_THREADS | WaitOptions::ONLY_CURRENT_THREAD_CHILD
}
/// Recommended options for just waiting for exit status.
pub fn default_exit() -> WaitOptions {
WaitOptions::ONLY_CURRENT_THREAD_CHILD
}
}
/// Error type for tracing a process.
#[derive(Debug, Fail)]
pub enum TraceError {
#[fail(display = "invalid signal")]
InvalidSignal,
#[fail(display = "invalid remote address or length")]
InvalidRemoteAddress,
#[fail(display = "process not found")]
NotFound,
#[fail(display = "permission denied")]
PermissionDenied,
}
/// Thin wrapper for raw thread id.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Thread {
tid: u32,
}
impl Thread {
/// Returns the thread id of calling thread.
pub fn current_tid() -> u32 {
unsafe { libc::syscall(libc::SYS_gettid) as u32 }
}
/// Returns the thread id (`gettid()` or `Pid` in procfs)
pub fn tid(self) -> u32 |
/// Returns the process which contains current thread.
pub fn to_process(self) -> Result<Process, procfs::ProcError> {
self.to_procfs()
.and_then(|process| process.status())
.and_then(|status| Ok(Process::from(status.tgid as u32)))
}
/// Returns the next event by `wait(tid)`.
pub fn trace_next_event(self) -> Result<Event, WaitError> {
wait(WaitTarget::by_pid(self.tid()), WaitOptions::THREAD_ONLY)
}
/// PTRACE_DETACH. `signal` = 0: continue without signaling
pub fn trace_detach(self, signal: i32) -> Result<(), TraceError> {
self.ptrace(libc::PTRACE_DETACH, None, signal as _)
.map(|_| ())
}
/// PTRACE_CONT. `signal` = 0: continue without signaling
pub fn trace_resume(self, signal: i32) -> Result<(), TraceError> {
self.ptrace(libc::PTRACE_CONT, None, signal as _)
.map(|_| ())
}
/// PTRACE_SETOPTIONS.
///
/// Note that a child process (thread) always inherits its parent's option.
/// It's no need to set the options again on a newly created child.
///
/// The ptrace options are stored in `task_struct.ptrace`. When creating a
/// new task, `_do_fork` finally invokes `arch_dup_task_struct`, where
/// `task_struct` is directly copied.
pub fn trace_set_options(self, options: i32) -> Result<(), TraceError> {
self.ptrace(libc::PTRACE_SETOPTIONS, None, options as _)
.map(|_| ())
}
/// PTRACE_GETEVENTMSG.
pub fn trace_get_event_message(self) -> Result<usize, TraceError> {
let mut msg = 0usize;
self.ptrace(libc::PTRACE_GETEVENTMSG, None, &mut msg as *mut _ as _)?;
Ok(msg)
}
/// PTRACE_GET_REGS.
pub fn trace_get_registers(self) -> Result<RegisterSet, TraceError> {
let mut r = MaybeUninit::uninit();
self.ptrace(libc::PTRACE_GETREGS, None, r.as_mut_ptr() as _)?;
Ok(unsafe { r.assume_init() })
}
/// PTRACE_SET_REGS.
pub fn trace_set_registers(self, regs: &RegisterSet) -> Result<(), TraceError> {
self.ptrace(libc::PTRACE_SETREGS, None, regs as *const _ as _)?;
Ok(())
}
/// PTRACE_SINGLESTEP.
pub fn trace_single_step(self, signal: i32) -> Result<(), TraceError> {
self.ptrace(libc::PTRACE_SINGLESTEP, None, signal as _)
.map(|_| ())
}
/// Returns whether the thread exists in the kernel.
///
/// A zombie (`'Z'`) entry counts as gone; a procfs read failure means the
/// thread has already been reaped.
pub fn exists(self) -> bool {
    self.to_procfs()
        .map_or(false, |proc| proc.stat.state != 'Z')
}
/// Returns whether `others` and `self` are inside the same process.
///
/// Checks whether `others` shows up under this thread's `/proc/<tid>/task/`.
pub fn same_process(self, others: Thread) -> bool {
    let task_entry = format!("/proc/{}/task/{}", self.tid, others.tid);
    Path::new(&task_entry).exists()
}
/// Suspend current thread. The thread does not stop immediately;
/// perform `wait` to observe the state change.
pub fn suspend(self, pid: u32) -> Result<(), TraceError> {
    let stop = Signal::SIGSTOP;
    self.send_signal(stop, pid)
}
/// Sends a signal to the thread.
///
/// Uses `tgkill(pid, tid, sig)` so the signal is delivered to this exact
/// thread within process `pid`, not to an arbitrary thread of the group.
pub fn send_signal(self, signal: Signal, pid: u32) -> Result<(), TraceError> {
    if unsafe { libc::syscall(libc::SYS_tgkill, pid, self.tid, signal as i32) } == 0 {
        return Ok(());
    }
    Err(match errno() {
        Errno(libc::EINVAL) => TraceError::InvalidSignal,
        Errno(libc::EPERM) => TraceError::PermissionDenied,
        Errno(libc::ESRCH) => TraceError::NotFound,
        // NOTE(review): as the comment below admits, EAGAIN can occur for
        // real-time signals (full signal queue) and would panic here —
        // confirm callers never send real-time signals.
        _ => unreachable!(), // Corner case: EAGAIN could be returned on real-time signal
    })
}
/// Returns info from procfs.
pub fn to_procfs(self) -> procfs::ProcResult<procfs::Process> {
    let tid = self.tid;
    procfs::Process::new(tid as _)
}
/// Thin wrapper over `libc::ptrace` that maps `errno` to `TraceError`.
///
/// `errno` is cleared before the call and inspected afterwards, because the
/// raw return value alone cannot signal failure for every request (see
/// `trace_peek`, where the returned word is data).
fn ptrace(
    self,
    request: libc::c_uint,
    addr: Option<usize>,
    data: usize,
) -> Result<libc::c_long, TraceError> {
    set_errno(Errno(0));
    let addr = addr.unwrap_or(0);
    // SAFETY: FFI call; the meaning of `addr`/`data` is defined by `request`
    // and the kernel validates the tracee-side addresses.
    let ret = unsafe { libc::ptrace(request, self.tid, addr, data) };
    let errno = errno();
    match errno.0 {
        0 => Ok(ret),
        libc::EPERM => Err(TraceError::PermissionDenied),
        libc::EIO | libc::EINVAL | libc::EFAULT => Err(TraceError::InvalidRemoteAddress),
        libc::ESRCH => {
            // ESRCH is ambiguous: the thread may be gone (NotFound), or it
            // may exist but not be ptrace-stopped — which is a caller bug.
            if self.exists() {
                panic!("ptrace invoked on thread with unexpected state")
            } else {
                Err(TraceError::NotFound)
            }
        }
        _ => panic!("unexpect ptrace error: {}", errno),
    }
}
}
/// Thin wrapper for raw process id.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Process {
    // Raw pid (`Tgid` in procfs terms); exposed through `Process::pid()`.
    pid: u32,
}
impl Process {
    /// Returns the process id of current process.
    pub fn current_pid() -> u32 {
        unsafe { libc::getpid() as _ }
    }
    /// Returns the process id (`getpid()` or `Tgid` in procfs)
    pub fn pid(self) -> u32 {
        self.pid
    }
    /// Returns the main thread object of current process.
    pub fn main_thread(self) -> Thread {
        // The main thread's tid equals the process pid.
        Thread { tid: self.pid }
    }
    /// Sends a signal to the process.
    pub fn send_signal(self, signal: Signal) -> Result<(), TraceError> {
        if unsafe { libc::kill(self.pid as _, signal as i32) } == 0 {
            return Ok(());
        }
        Err(match errno() {
            Errno(libc::EINVAL) => TraceError::InvalidSignal,
            Errno(libc::EPERM) => TraceError::PermissionDenied,
            Errno(libc::ESRCH) => TraceError::NotFound,
            _ => unreachable!(),
        })
    }
    /// Terminate the process.
    ///
    /// Best-effort: failure to deliver SIGKILL is logged, not returned.
    pub fn terminate(self) {
        if let Err(e) = self.send_signal(Signal::SIGKILL) {
            warn!(target: "trace", "cannot terminate {}: {}", self.pid, e);
        }
    }
    /// Returns `Some(TerminateReason)`. Returns `None` if still running. Panics
    /// on invalid pid.
    pub fn get_terminate_reason(self) -> Option<TerminateReason> {
        match wait(
            WaitTarget::by_pid(self.pid),
            WaitOptions::NONBLOCK | WaitOptions::ONLY_CURRENT_THREAD_CHILD,
        ) {
            Ok(event) => {
                if let EventKind::Terminated(reason) = event.kind {
                    Some(reason)
                } else {
                    // A non-terminal event (e.g. a stop) means still running.
                    None
                }
            }
            Err(WaitError::NoChild) => panic!("waiting for nonexist child"),
            Err(WaitError::NoEvent) => None,
        }
    }
    /// Returns info from procfs.
    pub fn to_procfs(self) -> procfs::ProcResult<procfs::Process> {
        self.main_thread().to_procfs()
    }
    /// Returns the next event by `wait(pid, __WALL)`.
    pub fn trace_next_event(self) -> Result<Event, WaitError> {
        wait(
            WaitTarget::by_pid(self.pid()),
            WaitOptions::PROCESS_AND_THREADS,
        )
    }
    /// Read or write multiple memory regions.
    ///
    /// Wraps `process_vm_writev` (when `write_mode`) or `process_vm_readv`.
    /// Returns the number of bytes transferred, which may be short.
    pub fn trace_read_write_memory_vectorized(
        self,
        write_mode: bool,
        local: &[IoVec],
        remote: &[IoVec],
    ) -> Result<usize, TraceError> {
        let r = if write_mode {
            unsafe {
                libc::process_vm_writev(
                    self.pid() as _,
                    local.as_ptr() as _,
                    local.len() as _,
                    remote.as_ptr() as _,
                    remote.len() as _,
                    0,
                )
            }
        } else {
            unsafe {
                libc::process_vm_readv(
                    self.pid() as _,
                    local.as_ptr() as _,
                    local.len() as _,
                    remote.as_ptr() as _,
                    remote.len() as _,
                    0,
                )
            }
        };
        if r != -1 {
            return Ok(r as _);
        }
        let errno = errno();
        Err(match errno.0 {
            libc::EFAULT => TraceError::InvalidRemoteAddress,
            libc::EPERM => TraceError::PermissionDenied,
            libc::ESRCH => TraceError::NotFound,
            _ => unreachable!(), // ENOMEM, EINVAL
        })
    }
    /// Reads memory.
    ///
    /// # Errors
    /// `InvalidRemoteAddress` on a short read: the whole range must be readable.
    pub fn trace_read_memory(self, base: u64, buf: &mut [u8]) -> Result<(), TraceError> {
        let local = [unsafe { IoVec::from_buffer(buf) }];
        let remote = [IoVec::new(base, buf.len())];
        let len =
            self.trace_read_write_memory_vectorized(false /* write_mode */, &local, &remote)?;
        if len != buf.len() {
            Err(TraceError::InvalidRemoteAddress)
        } else {
            Ok(())
        }
    }
    /// Writes memory; memory protection is respected.
    pub fn trace_write_memory(self, base: u64, buf: &[u8]) -> Result<(), TraceError> {
        let local = [unsafe { IoVec::from_buffer(buf) }];
        let remote = [IoVec::new(base, buf.len())];
        let len =
            self.trace_read_write_memory_vectorized(true /* write_mode */, &local, &remote)?;
        if len != buf.len() {
            Err(TraceError::InvalidRemoteAddress)
        } else {
            Ok(())
        }
    }
    /// PTRACE_PEEKDATA: read memory in word.
    pub fn trace_peek(self, base: u64) -> Result<usize, TraceError> {
        Ok(self
            .main_thread()
            .ptrace(libc::PTRACE_PEEKDATA, Some(base as _), 0)? as _)
    }
    /// PTRACE_POKEDATA: write memory in word.
    pub fn trace_poke(self, base: u64, data: usize) -> Result<(), TraceError> {
        self.main_thread()
            .ptrace(libc::PTRACE_POKEDATA, Some(base as _), data)?;
        Ok(())
    }
    /// Write memory *without* checking protection. Slower, but more robust.
    pub fn trace_write_memory_force(self, mut base: u64, mut buf: &[u8]) -> Result<(), TraceError> {
        const WORD_SIZE: usize = std::mem::size_of::<usize>();
        // As the write operation works in word level, we need to align the
        // buffer first. Here's an example request, where:
        // WORD_SIZE = 8, base = 5, buf.len() = 13.
        //
        // | 0 1 2 3 4 5 6 7 | 8 9 a b c d e f |
        // |           x x x | x x x x x x x x |
        // | 0 1 2 3 4 5 6 7 | 8 9 a b c d e f |
        // | x x             |                 |
        while !buf.is_empty() {
            let remainder = base as usize % WORD_SIZE;
            if remainder != 0 || buf.len() < WORD_SIZE {
                // Unaligned head or short tail: read-modify-write one word.
                let aligned = base - remainder as u64;
                let mut word = self.trace_peek(aligned)?.to_ne_bytes();
                // Partially update the buffer.
                let begin = remainder;
                let end = std::cmp::min(begin + buf.len(), WORD_SIZE);
                let len = end - begin;
                word[begin..end].copy_from_slice(&buf[0..len]);
                self.trace_poke(aligned, usize::from_ne_bytes(word))?;
                base += len as u64;
                buf = &buf[len..];
            } else {
                // Aligned full word: write it directly.
                let mut word = [0; WORD_SIZE];
                word.copy_from_slice(&buf[0..WORD_SIZE]);
                self.trace_poke(base, usize::from_ne_bytes(word))?;
                base += WORD_SIZE as u64;
                buf = &buf[WORD_SIZE..];
            }
        }
        Ok(())
    }
}
impl From<u32> for Process {
fn from(pid: u32) -> Process {
Process { pid }
}
}
impl From<u32> for Thread {
fn from(tid: u32) -> Thread {
Thread { tid }
}
}
| {
self.tid
} |
lib.rs | use std::error::Error;
use std::fs;
use std::env;
/// Parsed command-line configuration for a search run.
pub struct Config {
    /// The string to search for.
    pub query: String,
    /// Path of the file to search in.
    pub filename: String,
    /// `false` when the `CASE_INSENSITIVE` environment variable is set.
    pub case_sensitive: bool,
}
impl Config {
    /// Builds a `Config` from an argument iterator such as `env::args()`.
    ///
    /// The first item (program name) is skipped. Accepting any
    /// `Iterator<Item = String>` keeps existing `env::Args` callers working
    /// while making the constructor testable without real CLI arguments.
    ///
    /// # Errors
    /// Returns a static message when the query or filename is missing.
    pub fn new(mut args: impl Iterator<Item = String>) -> Result<Config, &'static str> {
        args.next();
        let query = match args.next() {
            Some(arg) => arg,
            None => return Err("Didn't get a query string"),
        };
        let filename = match args.next() {
            Some(arg) => arg,
            None => return Err("Didn't get a file name"),
        };
        // Case-sensitive unless CASE_INSENSITIVE is set (its value is ignored).
        let case_sensitive = env::var("CASE_INSENSITIVE").is_err();
        Ok(Config {
            query,
            filename,
            case_sensitive,
        })
    }
}
pub fn run(config: Config) -> Result<(), Box<dyn Error>> {
let contents = fs::read_to_string(config.filename)?;
let results = if config.case_sensitive | else {
search_case_insensitive(&config.query, &contents)
};
for line in results {
println!("{}", line);
}
Ok(())
}
/// Returns all lines of `contents` that contain `query` (case-sensitive).
pub fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> {
    let mut matches = Vec::new();
    for line in contents.lines() {
        if line.contains(query) {
            matches.push(line);
        }
    }
    matches
}
/// Returns all lines of `contents` that contain `query`, ignoring case.
///
/// Rewritten as a filter/collect chain for consistency with `search`.
pub fn search_case_insensitive<'a>(
    query: &str,
    contents: &'a str,
) -> Vec<&'a str> {
    // Lowercase the query once, outside the per-line loop.
    let query = query.to_lowercase();
    contents
        .lines()
        .filter(|line| line.to_lowercase().contains(&query))
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    // `search` must respect case: "Duct" does not match "duct".
    #[test]
    fn case_sensitive() {
        let query = "duct";
        let contents = "\
Rust:
safe, fast, productive.
Pick three.
Duct tape.";
        assert_eq!(vec!["safe, fast, productive."], search(query, contents));
    }
    // `search_case_insensitive` ignores case in both query and contents.
    #[test]
    fn case_insensitive() {
        let query = "rUsT";
        let contents = "\
Rust:
safe, fast, productive.
Pick three.
Trust me.";
        assert_eq!(
            vec!["Rust:", "Trust me."],
            search_case_insensitive(query, contents)
        );
    }
}
lib.rs | #![recursion_limit = "128"]
#[macro_use]
extern crate bitfield;
// We use a constant to make sure bits positions don't need to be literals but
// can also be constants or expressions.
// (Referenced below by `foo3`, `foo6`, and `setter_only_array`.)
const THREE: usize = 3;
/// 16-bit wrapper used to exercise the `from`/`into` conversions in the
/// bitfield declarations below.
#[derive(Copy, Clone, Debug)]
pub struct Foo(u16);
impl From<u8> for Foo {
    fn from(value: u8) -> Foo {
        // Lossless widening into the 16-bit wrapper.
        Foo(value.into())
    }
}
impl From<Foo> for u8 {
    fn from(value: Foo) -> u8 {
        // Truncating cast: only the low byte is kept.
        value.0 as u8
    }
}
// One field per line: `getter, setter: msb, lsb` (a single number is a
// boolean bit; a third number after lsb declares an array of fields).
// A bare type line (e.g. `u8;`) sets the default type for the fields after it.
bitfield! {
    #[derive(Copy, Clone)]
    /// documentation comments also work!
    struct FooBar(u32);
    impl Debug;
    foo1, set_foo1: 0, 0;
    u8;
    foo2, set_foo2: 31, 31;
    foo3, set_foo3: THREE, 0;
    // We make sure attributes are applied to fields. If attributes were not
    // applied, the compilation would fail with a `duplicate definition`
    // error.
    #[cfg(not(test))]
    foo3, set_foo3: 3, 0;
    u16, foo4, set_foo4: 31, 28;
    foo5, set_foo5: 0, 0, 32;
    u32;
    foo6, set_foo6: 5, THREE, THREE;
    // `_` in getter or setter position omits that accessor entirely.
    getter_only, _: 3, 1;
    _, setter_only: 2*2, 2;
    getter_only_array, _: 5, 3, 3;
    _, setter_only_array: 2*THREE, 4, 3;
    all_bits, set_all_bits: 31, 0;
    single_bit, set_single_bit: 3;
    // `into Foo` converts the getter result; `from into Foo` also converts
    // the setter argument.
    u8, into Foo, into_foo1, set_into_foo1: 31, 31;
    pub u8, into Foo, into_foo2, set_into_foo2: 31, 31;
    u8, from into Foo, from_foo1, set_from_foo1: 31, 31;
    u8, from into Foo, _, set_from_foo2: 31, 31;
    u8;
    into Foo, into_foo3, set_into_foo3: 31, 31;
    pub into Foo, into_foo4, set_into_foo4: 31, 31;
    into Foo, _, set_into_foo5: 31, 31;
    into Foo, into_foo6, _: 29, 29, 3;
    from into Foo, from_foo3, set_from_foo3: 31, 31;
    from into Foo, _, set_from_foo4: 31, 31;
    from into Foo, from_foo5, set_from_foo5: 29, 29, 3;
    from into Foo, from_foo6, _: 31, 31;
    i8;
    signed_single_bit, set_signed_single_bit: 0, 0;
    signed_two_bits, set_signed_two_bits: 1, 0;
    signed_eight_bits, set_signed_eight_bits: 7, 0;
    signed_eight_bits_unaligned, set_signed_eight_bits_unaligned: 8, 1;
    u128, u128_getter, set_u128: 8, 1;
    i128, i128_getter, set_i128: 8, 1;
}
// `bitfield_fields!` can also be used directly inside a hand-written impl
// block to add extra accessors to an existing bitfield type.
impl FooBar {
    bitfield_fields! {
        // Boolean field don't need a type
        foo7, _: 1;
    }
    bitfield_fields! {
        // If all fields have a type, we don't need to specify a default type
        u8, foo8,_: 1, 0;
        u32, foo9, _: 2, 0;
    }
    bitfield_fields! {
        // We can still set a default type
        u16;
        foo10, _: 2, 0;
        u32, foo11, _: 2, 0;
        foo12, _: 2, 0;
    }
    // Check if an empty bitfield_fields compiles without errors.
    bitfield_fields! {}
}
// Single-bit get/set for bits 0 (foo1), 31 (foo2), 3 (single_bit), and the
// signed 1-bit field (where bit value 1 reads back as -1).
#[test]
fn test_single_bit() {
    let mut fb = FooBar(0);
    fb.set_foo1(1);
    assert_eq!(0x1, fb.0);
    assert_eq!(0x1, fb.foo1());
    assert_eq!(0x0, fb.foo2());
    assert_eq!(false, fb.single_bit());
    assert_eq!(-1, fb.signed_single_bit());
    fb.set_foo2(1);
    assert_eq!(0x8000_0001, fb.0);
    assert_eq!(0x1, fb.foo1());
    assert_eq!(0x1, fb.foo2());
    assert_eq!(false, fb.single_bit());
    assert_eq!(-1, fb.signed_single_bit());
    fb.set_foo1(0);
    assert_eq!(0x8000_0000, fb.0);
    assert_eq!(0x0, fb.foo1());
    assert_eq!(0x1, fb.foo2());
    assert_eq!(false, fb.single_bit());
    assert_eq!(0, fb.signed_single_bit());
    fb.set_single_bit(true);
    assert_eq!(0x8000_0008, fb.0);
    assert_eq!(0x0, fb.foo1());
    assert_eq!(0x1, fb.foo2());
    assert_eq!(true, fb.single_bit());
    assert_eq!(0, fb.signed_single_bit());
    fb.set_signed_single_bit(-1);
    assert_eq!(0x8000_0009, fb.0);
    assert_eq!(0x1, fb.foo1());
    assert_eq!(0x1, fb.foo2());
    assert_eq!(true, fb.single_bit());
    assert_eq!(-1, fb.signed_single_bit());
}
// Extra bits in the setter value must be masked off, not leak into
// neighboring bits.
#[test]
fn test_single_bit_plus_garbage() {
    let mut fb = FooBar(0);
    fb.set_foo1(0b10);
    assert_eq!(0x0, fb.0);
    assert_eq!(0x0, fb.foo1());
    assert_eq!(0x0, fb.foo2());
    fb.set_foo1(0b11);
    assert_eq!(0x1, fb.0);
    assert_eq!(0x1, fb.foo1());
    assert_eq!(0x0, fb.foo2());
}
// Multi-bit fields foo3 (bits 3..0) and foo4 (bits 31..28) are independent.
#[test]
fn test_multiple_bit() {
    let mut fb = FooBar(0);
    fb.set_foo3(0x0F);
    assert_eq!(0xF, fb.0);
    assert_eq!(0xF, fb.foo3());
    assert_eq!(0x0, fb.foo4());
    fb.set_foo4(0x0F);
    assert_eq!(0xF000_000F, fb.0);
    assert_eq!(0xF, fb.foo3());
    assert_eq!(0xF, fb.foo4());
    fb.set_foo3(0);
    assert_eq!(0xF000_0000, fb.0);
    assert_eq!(0x0, fb.foo3());
    assert_eq!(0xF, fb.foo4());
    fb.set_foo3(0xA);
    assert_eq!(0xF000_000A, fb.0);
    assert_eq!(0xA, fb.foo3());
    assert_eq!(0xF, fb.foo4());
}
// setter_only covers bits 4..2, getter_only bits 3..1 — they overlap, so a
// write through one is visible through the other.
#[test]
fn test_getter_setter_only() {
    let mut fb = FooBar(0);
    fb.setter_only(0x7);
    assert_eq!(0x1C, fb.0);
    assert_eq!(0x6, fb.getter_only());
}
// foo5 is an array of 32 one-bit fields starting at bit 0.
#[test]
fn test_array_field1() {
    let mut fb = FooBar(0);
    fb.set_foo5(0, 1);
    assert_eq!(0x1, fb.0);
    assert_eq!(1, fb.foo5(0));
    fb.set_foo5(0, 0);
    assert_eq!(0x0, fb.0);
    assert_eq!(0, fb.foo5(0));
    fb.set_foo5(0, 1);
    fb.set_foo5(6, 1);
    fb.set_foo5(31, 1);
    assert_eq!(0x8000_0041, fb.0);
    assert_eq!(1, fb.foo5(0));
    assert_eq!(1, fb.foo5(6));
    assert_eq!(1, fb.foo5(31));
    assert_eq!(0, fb.foo5(1));
    assert_eq!(0, fb.foo5(5));
    assert_eq!(0, fb.foo5(7));
    assert_eq!(0, fb.foo5(30));
}
// foo6 is an array of three 3-bit fields starting at bit THREE (3).
#[test]
fn test_array_field2() {
    let mut fb = FooBar(0);
    fb.set_foo6(0, 1);
    assert_eq!(0x8, fb.0);
    assert_eq!(1, fb.foo6(0));
    assert_eq!(0, fb.foo6(1));
    assert_eq!(0, fb.foo6(2));
    fb.set_foo6(0, 7);
    assert_eq!(0x38, fb.0);
    assert_eq!(7, fb.foo6(0));
    assert_eq!(0, fb.foo6(1));
    assert_eq!(0, fb.foo6(2));
    fb.set_foo6(2, 7);
    assert_eq!(0xE38, fb.0);
    assert_eq!(7, fb.foo6(0));
    assert_eq!(0, fb.foo6(1));
    assert_eq!(7, fb.foo6(2));
    fb.set_foo6(0, 0);
    assert_eq!(0xE00, fb.0);
    assert_eq!(0, fb.foo6(0));
    assert_eq!(0, fb.foo6(1));
    assert_eq!(7, fb.foo6(2));
}
// setter_only_array: three 3-bit write-only slots starting at bit 4; the
// lint allows cover the deliberate `0 * 2` / `1 * 3` shift arithmetic.
#[allow(unknown_lints)]
#[allow(identity_op)]
#[allow(erasing_op)]
#[test]
fn test_setter_only_array() {
    let mut fb = FooBar(0);
    fb.setter_only_array(0, 0);
    assert_eq!(0x0, fb.0);
    fb.setter_only_array(0, 0b111);
    assert_eq!(0b111 << (4 + 0 * 2), fb.0);
    fb.setter_only_array(0, 0);
    fb.setter_only_array(1, 0b111);
    assert_eq!(0b111 << (4 + 1 * 3), fb.0);
    fb.setter_only_array(1, 0);
    fb.setter_only_array(2, 0b111);
    assert_eq!(0b111 << (4 + 2 * 3), fb.0);
}
// getter_only_array: three read-only 3-bit slots starting at bit 3, read
// back against raw bit patterns written directly into fb.0.
#[test]
fn test_getter_only_array() {
    let mut fb = FooBar(0);
    assert_eq!(0, fb.getter_only_array(0));
    assert_eq!(0, fb.getter_only_array(1));
    assert_eq!(0, fb.getter_only_array(2));
    fb.0 = !(0x1FF << 3);
    assert_eq!(0, fb.getter_only_array(0));
    assert_eq!(0, fb.getter_only_array(1));
    assert_eq!(0, fb.getter_only_array(2));
    fb.0 = 0xF << 3;
    assert_eq!(0b111, fb.getter_only_array(0));
    assert_eq!(0b001, fb.getter_only_array(1));
    assert_eq!(0, fb.getter_only_array(2));
    fb.0 = 0xF << 6;
    assert_eq!(0, fb.getter_only_array(0));
    assert_eq!(0b111, fb.getter_only_array(1));
    assert_eq!(0b001, fb.getter_only_array(2));
    fb.0 = 0xF << 8;
    assert_eq!(0, fb.getter_only_array(0));
    assert_eq!(0b100, fb.getter_only_array(1));
    assert_eq!(0b111, fb.getter_only_array(2));
    fb.0 = 0b101_010_110 << 3;
    assert_eq!(0b110, fb.getter_only_array(0));
    assert_eq!(0b010, fb.getter_only_array(1));
    assert_eq!(0b101, fb.getter_only_array(2));
}
// Signed fields: sign-extension on read, two's-complement truncation on
// write, including the unaligned 8-bit field at bits 8..1.
#[test]
fn test_signed() {
    let mut fb = FooBar(0);
    assert_eq!(0, fb.signed_two_bits());
    assert_eq!(0, fb.signed_eight_bits());
    assert_eq!(0, fb.signed_eight_bits_unaligned());
    fb.set_signed_two_bits(-2);
    assert_eq!(0b10, fb.0);
    assert_eq!(-2, fb.signed_two_bits());
    assert_eq!(2, fb.signed_eight_bits());
    assert_eq!(1, fb.signed_eight_bits_unaligned());
    fb.set_signed_two_bits(-1);
    assert_eq!(0b11, fb.0);
    assert_eq!(-1, fb.signed_two_bits());
    assert_eq!(3, fb.signed_eight_bits());
    assert_eq!(1, fb.signed_eight_bits_unaligned());
    fb.set_signed_two_bits(0);
    assert_eq!(0, fb.0);
    assert_eq!(0, fb.signed_two_bits());
    assert_eq!(0, fb.signed_eight_bits());
    assert_eq!(0, fb.signed_eight_bits_unaligned());
    fb.set_signed_two_bits(1);
    assert_eq!(1, fb.0);
    assert_eq!(1, fb.signed_two_bits());
    assert_eq!(1, fb.signed_eight_bits());
    assert_eq!(0, fb.signed_eight_bits_unaligned());
    fb.set_signed_eight_bits(0);
    assert_eq!(0, fb.0);
    assert_eq!(0, fb.signed_two_bits());
    assert_eq!(0, fb.signed_eight_bits());
    assert_eq!(0, fb.signed_eight_bits_unaligned());
    fb.set_signed_eight_bits(-1);
    assert_eq!(0xFF, fb.0);
    assert_eq!(-1, fb.signed_two_bits());
    assert_eq!(-1, fb.signed_eight_bits());
    assert_eq!(127, fb.signed_eight_bits_unaligned());
    fb.set_signed_eight_bits(-128);
    assert_eq!(0x80, fb.0);
    assert_eq!(0, fb.signed_two_bits());
    assert_eq!(-128, fb.signed_eight_bits());
    assert_eq!(64, fb.signed_eight_bits_unaligned());
    fb.set_signed_eight_bits(127);
    assert_eq!(0x7F, fb.0);
    assert_eq!(-1, fb.signed_two_bits());
    assert_eq!(127, fb.signed_eight_bits());
    assert_eq!(63, fb.signed_eight_bits_unaligned());
    fb.set_signed_eight_bits_unaligned(0);
    assert_eq!(1, fb.0);
    assert_eq!(1, fb.signed_two_bits());
    assert_eq!(1, fb.signed_eight_bits());
    assert_eq!(0, fb.signed_eight_bits_unaligned());
    fb.set_signed_eight_bits(0);
    fb.set_signed_eight_bits_unaligned(-1);
    assert_eq!(0x1FE, fb.0);
    assert_eq!(-2, fb.signed_two_bits());
    assert_eq!(-2, fb.signed_eight_bits());
    assert_eq!(-1, fb.signed_eight_bits_unaligned());
    fb.set_signed_eight_bits_unaligned(-128);
    assert_eq!(0x100, fb.0);
    assert_eq!(0, fb.signed_two_bits());
    assert_eq!(0, fb.signed_eight_bits());
    assert_eq!(-128, fb.signed_eight_bits_unaligned());
    fb.set_signed_eight_bits_unaligned(127);
    assert_eq!(0xFE, fb.0);
    assert_eq!(-2, fb.signed_two_bits());
    assert_eq!(-2, fb.signed_eight_bits());
    assert_eq!(127, fb.signed_eight_bits_unaligned());
}
// Compile-time check that every getter returns its declared type.
#[test]
fn test_field_type() {
    let fb = FooBar(0);
    let _: u32 = fb.foo1();
    let _: u8 = fb.foo2();
    let _: u8 = fb.foo3();
    let _: u16 = fb.foo4();
    let _: u8 = fb.foo5(0);
    let _: u32 = fb.foo6(0);
    let _: bool = fb.foo7();
    let _: u8 = fb.foo8();
    let _: u32 = fb.foo9();
    let _: u16 = fb.foo10();
    let _: u32 = fb.foo11();
    let _: u16 = fb.foo12();
    let _: Foo = fb.into_foo1();
    let _: Foo = fb.into_foo2();
    let _: Foo = fb.into_foo3();
    let _: Foo = fb.into_foo4();
    let _: Foo = fb.into_foo6(0);
    let _: Foo = fb.from_foo1();
    let _: Foo = fb.from_foo3();
    let _: Foo = fb.from_foo5(0);
    let _: i8 = fb.signed_single_bit();
    let _: i8 = fb.signed_two_bits();
    let _: i8 = fb.signed_eight_bits();
    let _: i8 = fb.signed_eight_bits_unaligned();
    let _: u128 = fb.u128_getter();
    let _: i128 = fb.i128_getter();
}
// Compile-time check that `into Foo` setters still take the raw type (u8).
#[test]
fn test_into_setter() {
    let mut fb = FooBar(0);
    // We just check that the parameter type is correct
    fb.set_into_foo1(0u8);
    fb.set_into_foo2(0u8);
    fb.set_into_foo3(0u8);
    fb.set_into_foo4(0u8);
}
// `from into Foo` setters accept a Foo and store its converted raw value.
#[test]
fn test_from_setter() {
    let mut fb = FooBar(0);
    assert_eq!(0, fb.0);
    fb.set_from_foo1(Foo(1));
    assert_eq!(1 << 31, fb.0);
    fb.set_from_foo1(Foo(0));
    assert_eq!(0, fb.0);
    fb.set_from_foo2(Foo(1));
    assert_eq!(1 << 31, fb.0);
    fb.set_from_foo2(Foo(0));
    assert_eq!(0, fb.0);
    fb.set_from_foo3(Foo(1));
    assert_eq!(1 << 31, fb.0);
    fb.set_from_foo3(Foo(0));
    assert_eq!(0, fb.0);
    fb.set_from_foo4(Foo(1));
    assert_eq!(1 << 31, fb.0);
    fb.set_from_foo4(Foo(0));
    assert_eq!(0, fb.0);
    fb.set_from_foo5(1, Foo(1));
    assert_eq!(1 << 30, fb.0);
}
// all_bits spans the whole 32-bit storage word.
#[test]
fn test_all_bits() {
    let mut fb = FooBar(0);
    assert_eq!(0, fb.all_bits());
    fb.set_all_bits(!0u32);
    assert_eq!(!0u32, fb.0);
    assert_eq!(!0u32, fb.all_bits());
    fb.0 = 0x8000_0001;
    assert_eq!(0x8000_0001, fb.all_bits());
}
// The #[derive(Copy, Clone)] attribute inside bitfield! must take effect:
// using `a` twice after a move would not compile otherwise.
#[test]
fn test_is_copy() {
    let a = FooBar(0);
    let _b = a;
    let _c = a;
}
// `impl Debug;` must list every readable field (including arrays) exactly.
#[test]
fn test_debug() {
    let fb = FooBar(1_234_567_890);
    let expected = "FooBar { .0: 1234567890, foo1: 0, foo2: 0, foo3: 2, foo3: 2, foo4: 4, foo5: [0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0], foo6: [2, 3, 1], getter_only: 1, getter_only_array: [2, 3, 1], all_bits: 1234567890, single_bit: false, into_foo1: Foo(0), into_foo2: Foo(0), from_foo1: Foo(0), into_foo3: Foo(0), into_foo4: Foo(0), into_foo6: [Foo(0), Foo(1), Foo(0)], from_foo3: Foo(0), from_foo5: [Foo(0), Foo(1), Foo(0)], from_foo6: Foo(0), signed_single_bit: 0, signed_two_bits: -2, signed_eight_bits: -46, signed_eight_bits_unaligned: 105, u128_getter: 105, i128_getter: 105 }";
    assert_eq!(expected, format!("{:?}", fb))
}
// Bitfield over a generic slice storage (`[u8]`): bit positions run across
// element boundaries in LSB0 order.
bitfield! {
    struct ArrayBitfield([u8]);
    u32;
    foo1, set_foo1: 0, 0;
    foo2, set_foo2: 7, 0;
    foo3, set_foo3: 8, 1;
    foo4, set_foo4: 19, 4;
    i32;
    signed_foo1, set_signed_foo1: 0, 0;
    signed_foo2, set_signed_foo2: 7, 0;
    signed_foo3, set_signed_foo3: 8, 1;
    signed_foo4, set_signed_foo4: 19, 4;
    u128, u128_getter, set_u128: 8, 1;
}
// Exercises all ArrayBitfield fields, including ones that straddle the
// boundaries between the three u8 storage elements.
#[test]
fn test_arraybitfield() {
    let mut ab = ArrayBitfield([0; 3]);
    assert_eq!(0u32, ab.foo1());
    assert_eq!(0u32, ab.foo2());
    assert_eq!(0u32, ab.foo3());
    assert_eq!(0u32, ab.foo4());
    assert_eq!(0i32, ab.signed_foo1());
    assert_eq!(0i32, ab.signed_foo2());
    assert_eq!(0i32, ab.signed_foo3());
    assert_eq!(0i32, ab.signed_foo4());
    assert_eq!(0u128, ab.u128_getter());
    ab.set_foo1(1);
    assert_eq!([1, 0, 0], ab.0);
    assert_eq!(1, ab.foo1());
    assert_eq!(1, ab.foo2());
    assert_eq!(0, ab.foo3());
    assert_eq!(0, ab.foo4());
    assert_eq!(-1, ab.signed_foo1());
    assert_eq!(1, ab.signed_foo2());
    assert_eq!(0, ab.signed_foo3());
    assert_eq!(0, ab.signed_foo4());
    assert_eq!(0, ab.u128_getter());
    ab.set_foo1(0);
    ab.set_foo2(0xFF);
    assert_eq!([0xFF, 0, 0], ab.0);
    assert_eq!(1, ab.foo1());
    assert_eq!(0xFF, ab.foo2());
    assert_eq!(0x7F, ab.foo3());
    assert_eq!(0x0F, ab.foo4());
    assert_eq!(-1, ab.signed_foo1());
    assert_eq!(-1, ab.signed_foo2());
    assert_eq!(127, ab.signed_foo3());
    assert_eq!(0x0F, ab.signed_foo4());
    assert_eq!(0x0F, ab.u128_getter());
    ab.set_foo2(0);
    ab.set_foo3(0xFF);
    assert_eq!([0xFE, 0x01, 0], ab.0);
    assert_eq!(0, ab.foo1());
    assert_eq!(0xFE, ab.foo2());
    assert_eq!(0xFF, ab.foo3());
    assert_eq!(0x1F, ab.foo4());
    assert_eq!(0, ab.signed_foo1());
    assert_eq!(-2, ab.signed_foo2());
    assert_eq!(-1, ab.signed_foo3());
    assert_eq!(0x1F, ab.signed_foo4());
    assert_eq!(0x1F, ab.u128_getter());
    ab.set_foo3(0);
    ab.set_foo4(0xFFFF);
    assert_eq!([0xF0, 0xFF, 0x0F], ab.0);
    assert_eq!(0, ab.foo1());
    assert_eq!(0xF0, ab.foo2());
    assert_eq!(0xF8, ab.foo3());
    assert_eq!(0xFFFF, ab.foo4());
    assert_eq!(0, ab.signed_foo1());
    assert_eq!(-16, ab.signed_foo2());
    assert_eq!(-8, ab.signed_foo3());
    assert_eq!(-1, ab.signed_foo4());
    assert_eq!(0xFFFF, ab.u128_getter());
    ab.set_foo4(0x0);
    ab.set_signed_foo1(0);
    assert_eq!([0x00, 0x00, 0x00], ab.0);
    ab.set_signed_foo1(-1);
    assert_eq!([0x01, 0x00, 0x00], ab.0);
    ab.set_signed_foo1(0);
    ab.set_signed_foo2(127);
    assert_eq!([0x7F, 0x00, 0x00], ab.0);
    ab.set_signed_foo2(-128);
    assert_eq!([0x80, 0x00, 0x00], ab.0);
    ab.set_signed_foo2(1);
    assert_eq!([0x01, 0x00, 0x00], ab.0);
    ab.set_signed_foo2(-1);
    assert_eq!([0xFF, 0x00, 0x00], ab.0);
    ab.set_signed_foo2(0);
    ab.set_signed_foo3(127);
    assert_eq!([0xFE, 0x00, 0x00], ab.0);
    ab.set_signed_foo3(-1);
    assert_eq!([0xFE, 0x01, 0x00], ab.0);
    ab.set_signed_foo3(0);
    ab.set_signed_foo4(-1);
    assert_eq!([0xF0, 0xFF, 0x0F], ab.0);
    ab.set_signed_foo4(0);
    ab.set_u128(0xFFFF);
    assert_eq!([0xF0, 0xFF, 0x0F], ab.0);
}
// Same slice-storage machinery, but declared inside a function body and
// backed by u16 elements.
#[test]
fn test_arraybitfield2() {
    // Check that the macro can be called from a function.
    bitfield! {
        struct ArrayBitfield2([u16]);
        impl Debug;
        u32;
        foo1, set_foo1: 0, 0;
        foo2, set_foo2: 7, 0;
        foo3, set_foo3: 8, 1;
        foo4, set_foo4: 20, 4;
    }
    let mut ab = ArrayBitfield2([0; 2]);
    assert_eq!(0, ab.foo1());
    assert_eq!(0, ab.foo2());
    assert_eq!(0, ab.foo3());
    assert_eq!(0, ab.foo4());
    ab.set_foo1(1);
    assert_eq!([1, 0], ab.0);
    assert_eq!(1, ab.foo1());
    assert_eq!(1, ab.foo2());
    assert_eq!(0, ab.foo3());
    assert_eq!(0, ab.foo4());
    ab.set_foo1(0);
    ab.set_foo2(0xFF);
    assert_eq!([0xFF, 0], ab.0);
    assert_eq!(1, ab.foo1());
    assert_eq!(0xFF, ab.foo2());
    assert_eq!(0x7F, ab.foo3());
    assert_eq!(0x0F, ab.foo4());
    ab.set_foo2(0);
    ab.set_foo3(0xFF);
    assert_eq!([0x1FE, 0x0], ab.0);
    assert_eq!(0, ab.foo1());
    assert_eq!(0xFE, ab.foo2());
    assert_eq!(0xFF, ab.foo3());
    assert_eq!(0x1F, ab.foo4());
    ab.set_foo3(0);
    ab.set_foo4(0xFFFF);
    assert_eq!([0xFFF0, 0xF], ab.0);
    assert_eq!(0, ab.foo1());
    assert_eq!(0xF0, ab.foo2());
    assert_eq!(0xF8, ab.foo3());
    assert_eq!(0xFFFF, ab.foo4());
}
// Slice storage with MSB0 bit order: bit 0 is the most significant bit of
// the first element.
bitfield! {
    struct ArrayBitfieldMsb0(MSB0 [u8]);
    impl Debug;
    u32;
    foo1, set_foo1: 0, 0;
    foo2, set_foo2: 7, 0;
    foo3, set_foo3: 8, 1;
    foo4, set_foo4: 19, 4;
    i32;
    signed_foo1, set_signed_foo1: 0, 0;
    signed_foo2, set_signed_foo2: 7, 0;
    signed_foo3, set_signed_foo3: 8, 1;
    signed_foo4, set_signed_foo4: 19, 4;
}
// Mirrors test_arraybitfield, with expectations flipped for MSB0 layout
// (e.g. setting bit 0 produces 0b1000_0000 in the first byte).
#[test]
fn test_arraybitfield_msb0() {
    let mut ab = ArrayBitfieldMsb0([0; 3]);
    assert_eq!(0, ab.foo1());
    assert_eq!(0, ab.foo2());
    assert_eq!(0, ab.foo3());
    assert_eq!(0, ab.foo4());
    assert_eq!(0, ab.signed_foo1());
    assert_eq!(0, ab.signed_foo2());
    assert_eq!(0, ab.signed_foo3());
    assert_eq!(0, ab.signed_foo4());
    ab.set_foo1(1);
    assert_eq!([0b1000_0000, 0, 0], ab.0);
    assert_eq!(1, ab.foo1());
    assert_eq!(0b1000_0000, ab.foo2());
    assert_eq!(0, ab.foo3());
    assert_eq!(0, ab.foo4());
    assert_eq!(-1, ab.signed_foo1());
    assert_eq!(-128, ab.signed_foo2());
    assert_eq!(0, ab.signed_foo3());
    assert_eq!(0, ab.signed_foo4());
    ab.set_foo1(0);
    ab.set_foo2(0xFF);
    assert_eq!([0b1111_1111, 0, 0], ab.0);
    assert_eq!(1, ab.foo1());
    assert_eq!(0b1111_1111, ab.foo2());
    assert_eq!(0b1111_1110, ab.foo3());
    assert_eq!(0b1111_0000_0000_0000, ab.foo4());
    assert_eq!(-1, ab.signed_foo1());
    assert_eq!(-1, ab.signed_foo2());
    assert_eq!(-2, ab.signed_foo3());
    assert_eq!(-4096, ab.signed_foo4());
    ab.set_foo2(0);
    ab.set_foo3(0xFF);
    assert_eq!([0b0111_1111, 0b1000_0000, 0], ab.0);
    assert_eq!(0, ab.foo1());
    assert_eq!(0b0111_1111, ab.foo2());
    assert_eq!(0xFF, ab.foo3());
    assert_eq!(0b1111_1000_0000_0000, ab.foo4());
    assert_eq!(0, ab.signed_foo1());
    assert_eq!(127, ab.signed_foo2());
    assert_eq!(-1, ab.signed_foo3());
    assert_eq!(-2048, ab.signed_foo4());
    ab.set_foo3(0);
    ab.set_foo4(0xFFFF);
    assert_eq!([0x0F, 0xFF, 0xF0], ab.0);
    assert_eq!(0, ab.foo1());
    assert_eq!(0x0F, ab.foo2());
    assert_eq!(0b0001_1111, ab.foo3());
    assert_eq!(0xFFFF, ab.foo4());
    assert_eq!(0, ab.signed_foo1());
    assert_eq!(0x0F, ab.signed_foo2());
    assert_eq!(0b0001_1111, ab.signed_foo3());
    assert_eq!(-1, ab.signed_foo4());
    ab.set_foo4(0x0);
    ab.set_signed_foo1(0);
    assert_eq!([0x00, 0x00, 0x00], ab.0);
    ab.set_signed_foo1(-1);
    assert_eq!([0b1000_0000, 0x00, 0x00], ab.0);
    ab.set_signed_foo1(0);
    ab.set_signed_foo2(127);
    assert_eq!([0x7F, 0x00, 0x00], ab.0);
    ab.set_signed_foo2(-128);
    assert_eq!([0x80, 0x00, 0x00], ab.0);
    ab.set_signed_foo2(1);
    assert_eq!([0x01, 0x00, 0x00], ab.0);
    ab.set_signed_foo2(-1);
    assert_eq!([0xFF, 0x00, 0x00], ab.0);
    ab.set_signed_foo2(0);
    ab.set_signed_foo3(127);
    assert_eq!([0b0011_1111, 0b1000_0000, 0], ab.0);
    ab.set_signed_foo3(-1);
    assert_eq!([0b0111_1111, 0b1000_0000, 0], ab.0);
    ab.set_signed_foo3(0);
    ab.set_signed_foo4(-1);
    assert_eq!([0x0F, 0xFF, 0xF0], ab.0);
}
// Visibility checks: the struct and per-field `pub` markers must be honored
// when the bitfield is declared inside a private module.
mod some_module {
    bitfield! {
        pub struct PubBitFieldInAModule(u32);
        impl Debug;
        /// Attribute works on pub fields
        pub field1, set_field1: 1;
        pub field2, _: 1;
        pub _, set_field3: 1;
        pub u16, field4, set_field4: 1;
        /// Check if multiple attributes are applied
        #[cfg(not(test))]
        pub u16, field4, set_field4: 1;
        pub u16, _, set_field5: 1;
        pub u16, field6, _: 1;
        pub field7, set_field7: 1;
        pub field8, set_field8: 1, 1;
        #[cfg(not(test))]
        /// And make sure not only the last attributes is applied
        pub field8, set_field8: 1, 1;
        pub field9, set_field9: 1, 1, 1;
        pub u32, field10, set_field10: 1;
        pub u32, field11, set_field11: 1, 1;
        pub u32, field12, set_field12: 1, 1, 1;
    }
}
// The `pub struct` must be visible from outside the module.
#[test]
fn struct_can_be_public() {
    let _ = some_module::PubBitFieldInAModule(0);
}
// Every `pub` accessor must be callable from outside the module.
#[test]
fn field_can_be_public() {
    let mut a = some_module::PubBitFieldInAModule(0);
    let _ = a.field1();
    a.set_field1(true);
    let _ = a.field2();
    a.set_field3(true);
    let _ = a.field4();
    a.set_field4(true);
    a.set_field5(true);
    let _ = a.field6();
    let _ = a.field7();
    a.set_field7(true);
    let _ = a.field8();
    a.set_field8(0);
    let _ = a.field9(0);
    a.set_field9(0, 0);
    let _ = a.field10();
    a.set_field10(true);
    let _ = a.field11();
    a.set_field11(0);
    let _ = a.field12(0);
    a.set_field12(0, 0);
}
// Everything in this module is to make sure that its possible to specify types
// in most of the possible ways.
#[allow(dead_code)]
mod test_types {
    use bitfield::BitRange;
    use std;
    use std::sync::atomic::{self, AtomicUsize};
    struct Foo;
    impl Foo {
        // Each pair below spells the same type differently: fully qualified,
        // crate-rooted (`::`), via a module alias, bare, wrapped in a
        // generic (Vec<...>), and as a (lifetime-annotated) reference.
        bitfield_fields! {
            std::sync::atomic::AtomicUsize, field1, set_field1: 0, 0;
            std::sync::atomic::AtomicUsize;
            field2, set_field2: 0, 0;
            ::std::sync::atomic::AtomicUsize, field3, set_field3: 0, 0;
            ::std::sync::atomic::AtomicUsize;
            field4, set_field4: 0, 0;
            atomic::AtomicUsize, field5, set_field5: 0, 0;
            atomic::AtomicUsize;
            field6, set_field6: 0, 0;
            AtomicUsize, field7, set_field7: 0, 0;
            AtomicUsize;
            field8, set_field8: 0, 0;
            Vec<std::sync::atomic::AtomicUsize>, field9, set_field9: 0, 0;
            Vec<std::sync::atomic::AtomicUsize>;
            field10, set_field10: 0, 0;
            Vec<::std::sync::atomic::AtomicUsize>, field11, set_field11: 0, 0;
            Vec<::std::sync::atomic::AtomicUsize>;
            field12, set_field12: 0, 0;
            Vec<atomic::AtomicUsize>, field13, set_field13: 0, 0;
            Vec<atomic::AtomicUsize>;
            field14, set_field14: 0, 0;
            Vec<AtomicUsize>, field15, set_field15: 0, 0;
            Vec<AtomicUsize>;
            field16, set_field16: 0, 0;
            &str, field17, set_field17: 0, 0;
            &str;
            field18, set_field18: 0, 0;
            &'static str, field19, set_field19: 0, 0;
            &'static str;
            field20, set_field20: 0, 0;
        }
    }
    // Hand-written BitRange impls back the accessors above with dummy values.
    impl BitRange<AtomicUsize> for Foo {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> AtomicUsize {
            AtomicUsize::new(0)
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: AtomicUsize) {}
    }
    impl BitRange<Vec<AtomicUsize>> for Foo {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> Vec<AtomicUsize> {
            vec![AtomicUsize::new(0)]
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: Vec<AtomicUsize>) {}
    }
    impl<'a> BitRange<&'a str> for Foo {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> &'a str {
            ""
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: &'a str) {}
    }
    // Compile-time check that every getter resolves to the declared type.
    #[test]
    fn test_field_type() {
        let test = Foo;
        let _: AtomicUsize = test.field1();
        let _: AtomicUsize = test.field2();
        let _: AtomicUsize = test.field3();
        let _: AtomicUsize = test.field4();
        let _: AtomicUsize = test.field5();
        let _: AtomicUsize = test.field6();
        let _: AtomicUsize = test.field7();
        let _: AtomicUsize = test.field8();
        let _: Vec<AtomicUsize> = test.field9();
        let _: Vec<AtomicUsize> = test.field10();
        let _: Vec<AtomicUsize> = test.field11();
        let _: Vec<AtomicUsize> = test.field12();
        let _: Vec<AtomicUsize> = test.field13();
        let _: Vec<AtomicUsize> = test.field14();
        let _: Vec<AtomicUsize> = test.field15();
        let _: Vec<AtomicUsize> = test.field16();
        let _: &str = test.field17();
        let _: &str = test.field18();
        let _: &'static str = test.field19();
        let _: &'static str = test.field20();
    }
}
/// Tests for the `no default BitRange;` bitfield option: the macro must not
/// generate a `BitRange` implementation, leaving the user to provide one.
#[allow(dead_code)]
mod test_no_default_bitrange {
    use bitfield::BitRange;
    use std::fmt::Debug;
    use std::fmt::Error;
    use std::fmt::Formatter;
    bitfield! {
        #[derive(Eq, PartialEq)]
        pub struct BitField1(u16);
        no default BitRange;
        impl Debug;
        u8;
        field1, set_field1: 10, 0;
        pub field2, _ : 12, 3;
        field3, set_field3: 2;
    }
    // Hand-written BitRange impl with recognizable outputs so the test below
    // can verify it is used instead of a macro-generated default.
    impl BitRange<u8> for BitField1 {
        fn bit_range(&self, msb: usize, lsb: usize) -> u8 {
            (msb + lsb) as u8
        }
        fn set_bit_range(&mut self, msb: usize, lsb: usize, value: u8) {
            self.0 = msb as u16 + lsb as u16 + u16::from(value)
        }
    }
    #[allow(unknown_lints)]
    #[allow(identity_op)]
    #[test]
    fn custom_bitrange_implementation_is_used() {
        let mut bf = BitField1(0);
        // field1 spans bits 10..0 -> bit_range(10, 0) -> 10 + 0, etc.
        assert_eq!(bf.field1(), 10 + 0);
        assert_eq!(bf.field2(), 12 + 3);
        assert_eq!(bf.field3(), true);
        bf.set_field1(42);
        assert_eq!(bf, BitField1(10 + 0 + 42));
    }
    bitfield! {
        pub struct BitField2(u16);
        no default BitRange;
        u8;
        field1, set_field1: 10, 0;
        pub field2, _ : 12, 3;
        field3, set_field3: 0;
    }
    impl BitRange<u8> for BitField2 {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> u8 {
            0
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: u8) {}
    }
    // Make sure Debug wasn't implemented by implementing it.
    impl Debug for BitField2 {
        fn fmt(&self, _: &mut Formatter) -> Result<(), Error> {
            unimplemented!()
        }
    }
    // Check that we can put `impl Debug` before `no default BitRange`
    bitfield! {
        pub struct BitField3(u16);
        impl Debug;
        no default BitRange;
        u8;
        field1, set_field1: 10, 0;
        pub field2, _ : 12, 3;
        field3, set_field3: 0;
    }
    impl BitRange<u8> for BitField3 {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> u8 {
            0
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: u8) {}
    }
    bitfield! {
        #[derive(Eq, PartialEq)]
        pub struct BitField4([u16]);
        no default BitRange;
        impl Debug;
        u8;
        field1, set_field1: 10, 0;
        pub field2, _ : 12, 3;
        field3, set_field3: 2;
    }
    impl<T> BitRange<u8> for BitField4<T> {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> u8 {
            0
        }
        // FIX: the method name was garbled in the source; the `BitRange`
        // trait (see the impls above/below) requires `set_bit_range`.
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: u8) {}
    }
    bitfield! {
        pub struct BitField5([u16]);
        no default BitRange;
        u8;
        field1, set_field1: 10, 0;
        pub field2, _ : 12, 3;
        field3, set_field3: 0;
    }
    impl<T> BitRange<u8> for BitField5<T> {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> u8 {
            0
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: u8) {}
    }
    // Make sure Debug wasn't implemented by implementing it.
    impl<T> Debug for BitField5<T> {
        fn fmt(&self, _: &mut Formatter) -> Result<(), Error> {
            unimplemented!()
        }
    }
    // Check that we can put `impl Debug` before `no default BitRange`
    bitfield! {
        pub struct BitField6([u16]);
        impl Debug;
        no default BitRange;
        u8;
        field1, set_field1: 10, 0;
        pub field2, _ : 12, 3;
        field3, set_field3: 0;
    }
    impl<T> BitRange<u8> for BitField6<T> {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> u8 {
            0
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: u8) {}
    }
    bitfield! {
        #[derive(Eq, PartialEq)]
        pub struct BitField7(MSB0 [u16]);
        no default BitRange;
        impl Debug;
        u8;
        field1, set_field1: 10, 0;
        pub field2, _ : 12, 3;
        field3, set_field3: 2;
    }
    impl<T> BitRange<u8> for BitField7<T> {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> u8 {
            0
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: u8) {}
    }
    bitfield! {
        pub struct BitField8(MSB0 [u16]);
        no default BitRange;
        u8;
        field1, set_field1: 10, 0;
        pub field2, _ : 12, 3;
        field3, set_field3: 0;
    }
    impl<T> BitRange<u8> for BitField8<T> {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> u8 {
            0
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: u8) {}
    }
    // Make sure Debug wasn't implemented by implementing it.
    impl<T> Debug for BitField8<T> {
        fn fmt(&self, _: &mut Formatter) -> Result<(), Error> {
            unimplemented!()
        }
    }
    // Check that we can put `impl Debug` before `no default BitRange`
    bitfield! {
        pub struct BitField9([u16]);
        impl Debug;
        no default BitRange;
        u8;
        field1, set_field1: 10, 0;
        pub field2, _ : 12, 3;
        field3, set_field3: 0;
    }
    impl<T> BitRange<u8> for BitField9<T> {
        fn bit_range(&self, _msb: usize, _lsb: usize) -> u8 {
            0
        }
        fn set_bit_range(&mut self, _msb: usize, _lsb: usize, _value: u8) {}
    }
    #[test]
    fn test_debug_is_implemented_with_no_default_bitrange() {
        format!("{:?}", BitField1(0));
        format!("{:?}", BitField3(0));
        format!("{:?}", BitField4([0; 1]));
        format!("{:?}", BitField6([0; 1]));
        format!("{:?}", BitField7([0; 1]));
        format!("{:?}", BitField9([0; 1]));
    }
}
| set_bit_range |
list_map.go | package anyconvert
import (
"fmt"
"github.com/golang/protobuf/ptypes/any"
)
// AnyToFloatListMap unmarshal an arbitrary any.Any message to a map[string][]float32.
// Declaration & implementation generation are located in internal/proto/value package
// It expect an any.Any message which contain the url type for a
// value.MapValue. It returns an error if the target message in
// any.Any does not match or if an unmarshal error occurs.
func AnyToFloatListMap(value *any.Any) (map[string][]float32, error) {
mapList := make(map[string][]float32)
mapValue, err := AnyToMapValue(value)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]float32: %w", err)
}
for k, value := range mapValue.Value {
listValue, err := AnyToListValue(value)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]float32: %w", err)
}
listType := make([]float32, 0, len(listValue.Value))
for _, v := range listValue.Value {
val, err := AnyToFloat(v)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]float32: %w", err)
}
listType = append(listType, val)
}
mapList[k] = listType
}
return mapList, nil
}
func MustAnyToFloatListMap(value *any.Any) map[string][]float32 {
v, err := AnyToFloatListMap(value)
handlePanic(err)
return v
}
// AnyToIntListMap unmarshal an arbitrary any.Any message to a map[string][]int32.
// Declaration & implementation generation are located in internal/proto/value package
// It expect an any.Any message which contain the url type for a
// value.MapValue. It returns an error if the target message in
// any.Any does not match or if an unmarshal error occurs.
func AnyToIntListMap(value *any.Any) (map[string][]int32, error) {
mapList := make(map[string][]int32)
mapValue, err := AnyToMapValue(value)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]int32: %w", err)
}
for k, value := range mapValue.Value {
listValue, err := AnyToListValue(value)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]int32: %w", err)
}
listType := make([]int32, 0, len(listValue.Value))
for _, v := range listValue.Value {
val, err := AnyToInt(v)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]int32: %w", err)
}
listType = append(listType, val)
}
mapList[k] = listType
}
return mapList, nil
}
func MustAnyToIntListMap(value *any.Any) map[string][]int32 {
v, err := AnyToIntListMap(value)
handlePanic(err)
return v
}
// AnyToStringListMap unmarshal an arbitrary any.Any message to a map[string][]string.
// Declaration & implementation generation are located in internal/proto/value package
// It expect an any.Any message which contain the url type for a
// value.MapValue. It returns an error if the target message in
// any.Any does not match or if an unmarshal error occurs.
func AnyToStringListMap(value *any.Any) (map[string][]string, error) {
mapList := make(map[string][]string)
mapValue, err := AnyToMapValue(value)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]string: %w", err)
}
for k, value := range mapValue.Value {
listValue, err := AnyToListValue(value)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]string: %w", err)
}
listType := make([]string, 0, len(listValue.Value))
for _, v := range listValue.Value {
val, err := AnyToString(v)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]string: %w", err)
}
listType = append(listType, val)
}
mapList[k] = listType
}
return mapList, nil
}
func MustAnyToStringListMap(value *any.Any) map[string][]string {
v, err := AnyToStringListMap(value)
handlePanic(err)
return v
}
// AnyToBoolListMap unmarshal an arbitrary any.Any message to a map[string][]bool.
// Declaration & implementation generation are located in internal/proto/value package
// It expect an any.Any message which contain the url type for a
// value.MapValue. It returns an error if the target message in
// any.Any does not match or if an unmarshal error occurs.
func AnyToBoolListMap(value *any.Any) (map[string][]bool, error) {
mapList := make(map[string][]bool)
mapValue, err := AnyToMapValue(value)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]bool: %w", err)
}
for k, value := range mapValue.Value {
listValue, err := AnyToListValue(value)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]bool: %w", err)
}
listType := make([]bool, 0, len(listValue.Value))
for _, v := range listValue.Value {
val, err := AnyToBool(v)
if err != nil {
return nil, fmt.Errorf("unable to convert to a map[string][]bool: %w", err)
}
listType = append(listType, val)
}
mapList[k] = listType
}
return mapList, nil
}
func MustAnyToBoolListMap(value *any.Any) map[string][]bool {
v, err := AnyToBoolListMap(value)
handlePanic(err)
return v
}
// FloatListMapToAny return an arbitrary any.Any message which contain the url type for a value.MapValue.
// Declaration & implementation generation are located in internal/proto/value package
// If an unmarshal error occurs, it return the error.
func FloatListMapToAny(values map[string][]float32) (*any.Any, error) {
mapListValue := make(map[string][]*any.Any)
for k, value := range values {
anyList := make([]*any.Any, 0, len(value))
for _, v := range value {
val, err := FloatToAny(v)
if err != nil {
return nil, err
}
anyList = append(anyList, val)
}
mapListValue[k] = anyList
}
return ListMapToAny(mapListValue)
}
func MustFloatListMapToAny(values map[string][]float32) *any.Any {
v, err := FloatListMapToAny(values)
handlePanic(err)
return v
}
// IntListMapToAny return an arbitrary any.Any message which contain the url type for a value.MapValue.
// Declaration & implementation generation are located in internal/proto/value package
// If an unmarshal error occurs, it return the error.
func | (values map[string][]int32) (*any.Any, error) {
mapListValue := make(map[string][]*any.Any)
for k, value := range values {
anyList := make([]*any.Any, 0, len(value))
for _, v := range value {
val, err := IntToAny(v)
if err != nil {
return nil, err
}
anyList = append(anyList, val)
}
mapListValue[k] = anyList
}
return ListMapToAny(mapListValue)
}
func MustIntListMapToAny(values map[string][]int32) *any.Any {
v, err := IntListMapToAny(values)
handlePanic(err)
return v
}
// StringListMapToAny return an arbitrary any.Any message which contain the url type for a value.MapValue.
// Declaration & implementation generation are located in internal/proto/value package
// If an unmarshal error occurs, it return the error.
func StringListMapToAny(values map[string][]string) (*any.Any, error) {
mapListValue := make(map[string][]*any.Any)
for k, value := range values {
anyList := make([]*any.Any, 0, len(value))
for _, v := range value {
val, err := StringToAny(v)
if err != nil {
return nil, err
}
anyList = append(anyList, val)
}
mapListValue[k] = anyList
}
return ListMapToAny(mapListValue)
}
func MustStringListMapToAny(values map[string][]string) *any.Any {
v, err := StringListMapToAny(values)
handlePanic(err)
return v
}
// BoolListMapToAny return an arbitrary any.Any message which contain the url type for a value.MapValue.
// Declaration & implementation generation are located in internal/proto/value package
// If an unmarshal error occurs, it return the error.
func BoolListMapToAny(values map[string][]bool) (*any.Any, error) {
mapListValue := make(map[string][]*any.Any)
for k, value := range values {
anyList := make([]*any.Any, 0, len(value))
for _, v := range value {
val, err := BoolToAny(v)
if err != nil {
return nil, err
}
anyList = append(anyList, val)
}
mapListValue[k] = anyList
}
return ListMapToAny(mapListValue)
}
func MustBoolListMapToAny(values map[string][]bool) *any.Any {
v, err := BoolListMapToAny(values)
handlePanic(err)
return v
}
// ListMapToAny return an arbitrary any.Any message which contain the url type for a value.MapValue.
// Declaration & implementation generation are located in internal/proto/value package
// If an unmarshal error occurs, it return the error.
func ListMapToAny(values map[string][]*any.Any) (*any.Any, error) {
anyMap := make(map[string]*any.Any)
for k, value := range values {
listValue, err := ListToAny(value)
if err != nil {
return nil, err
}
anyMap[k] = listValue
}
mapValue, err := MapToAny(anyMap)
if err != nil {
return nil, err
}
return mapValue, nil
}
| IntListMapToAny |
point_in_polygon.rs | mod tester;
mod wrapper;
use std::cmp::min;
use std::sync::Arc;
use futures::stream::BoxStream;
use futures::{StreamExt, TryStreamExt};
use geoengine_datatypes::dataset::DatasetId;
use geoengine_datatypes::primitives::VectorQueryRectangle;
use rayon::ThreadPool;
use serde::{Deserialize, Serialize};
use snafu::ensure;
use crate::adapters::FeatureCollectionChunkMerger;
use crate::engine::{
ExecutionContext, InitializedVectorOperator, Operator, QueryContext, TypedVectorQueryProcessor,
VectorOperator, VectorQueryProcessor, VectorResultDescriptor,
};
use crate::engine::{OperatorDatasets, QueryProcessor};
use crate::error;
use crate::util::Result;
use arrow::array::BooleanArray;
use async_trait::async_trait;
use geoengine_datatypes::collections::{
FeatureCollectionInfos, FeatureCollectionModifications, GeometryCollection,
MultiPointCollection, MultiPolygonCollection, VectorDataType,
};
pub use tester::PointInPolygonTester;
pub use wrapper::PointInPolygonTesterWithCollection;
/// The point in polygon filter requires two inputs in the following order:
/// 1. a `MultiPointCollection` source
/// 2. a `MultiPolygonCollection` source
/// Then, it filters the `MultiPointCollection`s so that only those point features
/// are retained that lie in any polygon (the features' time intervals are passed
/// to the containment test as well).
pub type PointInPolygonFilter = Operator<PointInPolygonFilterParams, PointInPolygonFilterSource>;
/// Parameter struct of the operator; currently there are no parameters.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct PointInPolygonFilterParams {}
/// The two input operators: `points` must produce `MultiPoint` data and
/// `polygons` must produce `MultiPolygon` data (validated in `initialize`).
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct PointInPolygonFilterSource {
    pub points: Box<dyn VectorOperator>,
    pub polygons: Box<dyn VectorOperator>,
}
impl OperatorDatasets for PointInPolygonFilterSource {
    /// Collects the dataset ids of both inputs, points first, then polygons.
    fn datasets_collect(&self, datasets: &mut Vec<DatasetId>) {
        self.points.datasets_collect(datasets);
        self.polygons.datasets_collect(datasets);
    }
}
#[typetag::serde]
#[async_trait]
impl VectorOperator for PointInPolygonFilter {
    /// Initializes both sources and verifies their vector data types:
    /// `points` must be `MultiPoint` and `polygons` must be `MultiPolygon`;
    /// otherwise an `InvalidType` error is returned.
    async fn initialize(
        self: Box<Self>,
        context: &dyn ExecutionContext,
    ) -> Result<Box<dyn InitializedVectorOperator>> {
        let points = self.sources.points.initialize(context).await?;
        let polygons = self.sources.polygons.initialize(context).await?;
        ensure!(
            points.result_descriptor().data_type == VectorDataType::MultiPoint,
            error::InvalidType {
                expected: VectorDataType::MultiPoint.to_string(),
                found: points.result_descriptor().data_type.to_string(),
            }
        );
        ensure!(
            polygons.result_descriptor().data_type == VectorDataType::MultiPolygon,
            error::InvalidType {
                expected: VectorDataType::MultiPolygon.to_string(),
                found: polygons.result_descriptor().data_type.to_string(),
            }
        );
        // The output descriptor equals the point input's descriptor: features
        // are only filtered out, never transformed.
        let initialized_operator = InitializedPointInPolygonFilter {
            result_descriptor: points.result_descriptor().clone(),
            points,
            polygons,
        };
        Ok(initialized_operator.boxed())
    }
}
/// The initialized form of the filter, holding both initialized inputs and
/// the (point-derived) result descriptor.
pub struct InitializedPointInPolygonFilter {
    points: Box<dyn InitializedVectorOperator>,
    polygons: Box<dyn InitializedVectorOperator>,
    result_descriptor: VectorResultDescriptor,
}
impl InitializedVectorOperator for InitializedPointInPolygonFilter {
    fn query_processor(&self) -> Result<TypedVectorQueryProcessor> {
        // Both `expect`s are safe: the data types were validated in
        // `PointInPolygonFilter::initialize`.
        let point_processor = self
            .points
            .query_processor()?
            .multi_point()
            .expect("checked in `PointInPolygonFilter` constructor");
        let polygon_processor = self
            .polygons
            .query_processor()?
            .multi_polygon()
            .expect("checked in `PointInPolygonFilter` constructor");
        Ok(TypedVectorQueryProcessor::MultiPoint(
            PointInPolygonFilterProcessor::new(point_processor, polygon_processor).boxed(),
        ))
    }
    fn result_descriptor(&self) -> &VectorResultDescriptor {
        &self.result_descriptor
    }
}
/// Query processor that streams point collections and drops every point
/// feature not contained in any polygon of the polygon stream.
pub struct PointInPolygonFilterProcessor {
    points: Box<dyn VectorQueryProcessor<VectorType = MultiPointCollection>>,
    polygons: Box<dyn VectorQueryProcessor<VectorType = MultiPolygonCollection>>,
}
impl PointInPolygonFilterProcessor {
    pub fn new(
        points: Box<dyn VectorQueryProcessor<VectorType = MultiPointCollection>>,
        polygons: Box<dyn VectorQueryProcessor<VectorType = MultiPolygonCollection>>,
    ) -> Self {
        Self { points, polygons }
    }
    /// Computes one containment flag per point feature: `true` iff any of the
    /// feature's coordinates is inside any polygon, as judged by
    /// `PointInPolygonTester` (which also receives the feature's time interval).
    /// The features are split into one contiguous chunk per available thread
    /// and processed in parallel on `thread_pool`. `points` must be non-empty.
    fn filter_parallel(
        points: &Arc<MultiPointCollection>,
        polygons: &MultiPolygonCollection,
        thread_pool: &ThreadPool,
    ) -> Vec<bool> {
        debug_assert!(!points.is_empty());
        // TODO: parallelize over coordinate rather than features
        let tester = Arc::new(PointInPolygonTester::new(polygons)); // TODO: multithread
        let parallelism = thread_pool.current_num_threads();
        let chunk_size = (points.len() as f64 / parallelism as f64).ceil() as usize;
        let mut result = vec![false; points.len()];
        thread_pool.scope(|scope| {
            let num_features = points.len();
            let feature_offsets = points.feature_offsets();
            let time_intervals = points.time_intervals();
            let coordinates = points.coordinates();
            for (chunk_index, chunk_result) in (&mut result).chunks_mut(chunk_size).enumerate() {
                let feature_index_start = chunk_index * chunk_size;
                let features_index_end = min(feature_index_start + chunk_size, num_features);
                let tester = tester.clone();
                scope.spawn(move |_| {
                    // Inclusive slice end: `n` features need `n + 1` offsets so
                    // that `two_tuple_windows` yields each feature's
                    // [start, end) coordinate range.
                    for (
                        feature_index,
                        ((coordinates_start_index, coordinates_end_index), time_interval),
                    ) in two_tuple_windows(
                        feature_offsets[feature_index_start..=features_index_end]
                            .iter()
                            .map(|&c| c as usize),
                    )
                    .zip(time_intervals[feature_index_start..features_index_end].iter())
                    .enumerate()
                    {
                        // A multi-point feature matches if ANY of its
                        // coordinates is inside a polygon.
                        let is_multi_point_in_polygon_collection = coordinates
                            [coordinates_start_index..coordinates_end_index]
                            .iter()
                            .any(|coordinate| {
                                tester.any_polygon_contains_coordinate(coordinate, time_interval)
                            });
                        chunk_result[feature_index] = is_multi_point_in_polygon_collection;
                    }
                });
            }
        });
        result
    }
    /// Runs `filter_parallel` on a blocking task and ORs the resulting flags
    /// into `initial_filter`, so flags accumulate across polygon batches.
    async fn filter_points(
        ctx: &dyn QueryContext,
        points: Arc<MultiPointCollection>,
        polygons: MultiPolygonCollection,
        initial_filter: &BooleanArray,
    ) -> Result<BooleanArray> {
        let thread_pool = ctx.thread_pool().clone();
        let thread_points = points.clone();
        let filter = tokio::task::spawn_blocking(move || {
            Self::filter_parallel(&thread_points, &polygons, &thread_pool)
        })
        .await?;
        arrow::compute::or(initial_filter, &filter.into()).map_err(Into::into)
    }
}
#[async_trait]
impl VectorQueryProcessor for PointInPolygonFilterProcessor {
    type VectorType = MultiPointCollection;
    /// For every incoming point batch, folds over ALL polygon batches of the
    /// same query rectangle: each batch ORs its containment flags into the
    /// accumulated filter, so a point survives if it matches a polygon of any
    /// batch. Empty point batches pass through unchanged; empty polygon
    /// batches are skipped.
    async fn vector_query<'a>(
        &'a self,
        query: VectorQueryRectangle,
        ctx: &'a dyn QueryContext,
    ) -> Result<BoxStream<'a, Result<Self::VectorType>>> {
        let filtered_stream =
            self.points
                .query(query, ctx)
                .await?
                .and_then(move |points| async move {
                    if points.is_empty() {
                        return Ok(points);
                    }
                    // Start with all-false flags; polygon batches OR into it.
                    let initial_filter = BooleanArray::from(vec![false; points.len()]);
                    let arc_points = Arc::new(points);
                    let filter = self
                        .polygons
                        .query(query, ctx)
                        .await?
                        .fold(Ok(initial_filter), |filter, polygons| async {
                            let polygons = polygons?;
                            if polygons.is_empty() {
                                return filter;
                            }
                            Self::filter_points(ctx, arc_points.clone(), polygons, &filter?).await
                        })
                        .await?;
                    arc_points.filter(filter).map_err(Into::into)
                });
        // Merge small filtered chunks up to the context's chunk byte size.
        Ok(
            FeatureCollectionChunkMerger::new(filtered_stream.fuse(), ctx.chunk_byte_size().into())
                .boxed(),
        )
    }
}
/// Yields overlapping pairs of consecutive items: (first, second),
/// (second, third), …. An iterator with fewer than two items produces
/// no output at all.
fn two_tuple_windows<I, T>(mut iter: I) -> impl Iterator<Item = (T, T)>
where
    I: Iterator<Item = T>,
    T: Copy,
{
    // Seed the scan state with the first item (or `None` for an empty input,
    // in which case the closure is never invoked).
    let seed = iter.next();
    iter.scan(seed, |previous, current| {
        let pair = (
            previous.expect("state is `Some` whenever an item follows the seed"),
            current,
        );
        *previous = Some(current);
        Some(pair)
    })
}
/// Unit tests for the point-in-polygon filter operator.
#[cfg(test)]
mod tests {
    use super::*;
    use geoengine_datatypes::primitives::{
        BoundingBox2D, Coordinate2D, MultiPoint, MultiPolygon, SpatialResolution, TimeInterval,
    };
    use geoengine_datatypes::util::test::TestDefault;
    use crate::engine::{ChunkByteSize, MockExecutionContext, MockQueryContext};
    use crate::mock::MockFeatureCollectionSource;
    #[test]
    fn point_in_polygon_boundary_conditions() {
        let collection = MultiPolygonCollection::from_data(
            vec![MultiPolygon::new(vec![vec![vec![
                (0.0, 0.0).into(),
                (10.0, 0.0).into(),
                (10.0, 10.0).into(),
                (0.0, 10.0).into(),
                (0.0, 0.0).into(),
            ]]])
            .unwrap()],
            vec![Default::default(); 1],
            Default::default(),
        )
        .unwrap();
        let tester = PointInPolygonTester::new(&collection);
        // the algorithm is not stable for boundary cases directly on the edges
        assert!(tester.any_polygon_contains_coordinate(
            &Coordinate2D::new(0.000_001, 0.000_001),
            &Default::default()
        ),);
        assert!(tester.any_polygon_contains_coordinate(
            &Coordinate2D::new(0.000_001, 0.1),
            &Default::default()
        ),);
        assert!(tester.any_polygon_contains_coordinate(
            &Coordinate2D::new(0.1, 0.000_001),
            &Default::default()
        ),);
        assert!(tester
            .any_polygon_contains_coordinate(&Coordinate2D::new(9.9, 9.9), &Default::default()),);
        assert!(tester
            .any_polygon_contains_coordinate(&Coordinate2D::new(10.0, 9.9), &Default::default()),);
        assert!(tester
            .any_polygon_contains_coordinate(&Coordinate2D::new(9.9, 10.0), &Default::default()),);
        assert!(!tester
            .any_polygon_contains_coordinate(&Coordinate2D::new(-0.1, -0.1), &Default::default()),);
        assert!(!tester
            .any_polygon_contains_coordinate(&Coordinate2D::new(0.0, -0.1), &Default::default()),);
        assert!(!tester
            .any_polygon_contains_coordinate(&Coordinate2D::new(-0.1, 0.0), &Default::default()),);
        assert!(!tester
            .any_polygon_contains_coordinate(&Coordinate2D::new(10.1, 10.1), &Default::default()),);
        assert!(!tester
            .any_polygon_contains_coordinate(&Coordinate2D::new(10.1, 9.9), &Default::default()),);
        assert!(!tester
            .any_polygon_contains_coordinate(&Coordinate2D::new(9.9, 10.1), &Default::default()),);
    }
    // All points lie within the polygon, so nothing is filtered out.
    #[tokio::test]
    async fn all() -> Result<()> {
        let points = MultiPointCollection::from_data(
            MultiPoint::many(vec![(0.001, 0.1), (1.0, 1.1), (2.0, 3.1)]).unwrap(),
            vec![TimeInterval::new_unchecked(0, 1); 3],
            Default::default(),
        )?;
        let point_source = MockFeatureCollectionSource::single(points.clone()).boxed();
        let polygon_source =
            MockFeatureCollectionSource::single(MultiPolygonCollection::from_data(
                vec![MultiPolygon::new(vec![vec![vec![
                    (0.0, 0.0).into(),
                    (10.0, 0.0).into(),
                    (10.0, 10.0).into(),
                    (0.0, 10.0).into(),
                    (0.0, 0.0).into(),
                ]]])?],
                vec![TimeInterval::new_unchecked(0, 1); 1],
                Default::default(),
            )?)
            .boxed();
        let operator = PointInPolygonFilter {
            params: PointInPolygonFilterParams {},
            sources: PointInPolygonFilterSource {
                points: point_source,
                polygons: polygon_source,
            },
        }
        .boxed()
        .initialize(&MockExecutionContext::test_default())
        .await?;
        let query_processor = operator.query_processor()?.multi_point().unwrap();
        let query_rectangle = VectorQueryRectangle {
            spatial_bounds: BoundingBox2D::new((0., 0.).into(), (10., 10.).into()).unwrap(),
            time_interval: TimeInterval::default(),
            spatial_resolution: SpatialResolution::zero_point_one(),
        };
        let ctx = MockQueryContext::new(ChunkByteSize::MAX);
        let query = query_processor.query(query_rectangle, &ctx).await.unwrap();
        let result = query
            .map(Result::unwrap)
            .collect::<Vec<MultiPointCollection>>()
            .await;
        assert_eq!(result.len(), 1);
        assert_eq!(result[0], points);
        Ok(())
    }
    // No polygons at all: the whole point batch is filtered away.
    #[tokio::test]
    async fn none() -> Result<()> {
        let points = MultiPointCollection::from_data(
            MultiPoint::many(vec![(0.0, 0.1), (1.0, 1.1), (2.0, 3.1)]).unwrap(),
            vec![TimeInterval::new_unchecked(0, 1); 3],
            Default::default(),
        )?;
        let point_source = MockFeatureCollectionSource::single(points.clone()).boxed();
        let polygon_source = MockFeatureCollectionSource::single(
            MultiPolygonCollection::from_data(vec![], vec![], Default::default())?,
        )
        .boxed();
        let operator = PointInPolygonFilter {
            params: PointInPolygonFilterParams {},
            sources: PointInPolygonFilterSource {
                points: point_source,
                polygons: polygon_source,
            },
        }
        .boxed()
        .initialize(&MockExecutionContext::test_default())
        .await?;
        let query_processor = operator.query_processor()?.multi_point().unwrap();
        let query_rectangle = VectorQueryRectangle {
            spatial_bounds: BoundingBox2D::new((0., 0.).into(), (10., 10.).into()).unwrap(),
            time_interval: TimeInterval::default(),
            spatial_resolution: SpatialResolution::zero_point_one(),
        };
        let ctx = MockQueryContext::new(ChunkByteSize::MAX);
        let query = query_processor.query(query_rectangle, &ctx).await.unwrap();
        let result = query
            .map(Result::unwrap)
            .collect::<Vec<MultiPointCollection>>()
            .await;
        assert_eq!(result.len(), 0);
        Ok(())
    }
    // Only points whose time interval overlaps a polygon's interval survive.
    #[tokio::test]
    async fn time() -> Result<()> {
        let points = MultiPointCollection::from_data(
            MultiPoint::many(vec![(1.0, 1.1), (2.0, 2.1), (3.0, 3.1)]).unwrap(),
            vec![
                TimeInterval::new(0, 1)?,
                TimeInterval::new(5, 6)?,
                TimeInterval::new(0, 5)?,
            ],
            Default::default(),
        )?;
        let point_source = MockFeatureCollectionSource::single(points.clone()).boxed();
        let polygon = MultiPolygon::new(vec![vec![vec![
            (0.0, 0.0).into(),
            (10.0, 0.0).into(),
            (10.0, 10.0).into(),
            (0.0, 10.0).into(),
            (0.0, 0.0).into(),
        ]]])?;
        let polygon_source =
            MockFeatureCollectionSource::single(MultiPolygonCollection::from_data(
                vec![polygon.clone(), polygon],
                vec![TimeInterval::new(0, 1)?, TimeInterval::new(1, 2)?],
                Default::default(),
            )?)
            .boxed();
        let operator = PointInPolygonFilter {
            params: PointInPolygonFilterParams {},
            sources: PointInPolygonFilterSource {
                points: point_source,
                polygons: polygon_source,
            },
        }
        .boxed()
        .initialize(&MockExecutionContext::test_default())
        .await?;
        let query_processor = operator.query_processor()?.multi_point().unwrap();
        let query_rectangle = VectorQueryRectangle {
            spatial_bounds: BoundingBox2D::new((0., 0.).into(), (10., 10.).into()).unwrap(),
            time_interval: TimeInterval::default(),
            spatial_resolution: SpatialResolution::zero_point_one(),
        };
        let ctx = MockQueryContext::new(ChunkByteSize::MAX);
        let query = query_processor.query(query_rectangle, &ctx).await.unwrap();
        let result = query
            .map(Result::unwrap)
            .collect::<Vec<MultiPointCollection>>()
            .await;
        assert_eq!(result.len(), 1);
        assert_eq!(result[0], points.filter(vec![true, false, true])?);
        Ok(())
    }
    #[tokio::test]
    async fn multiple_inputs() -> Result<()> {
        let points1 = MultiPointCollection::from_data(
            MultiPoint::many(vec![(5.0, 5.1), (15.0, 15.1)]).unwrap(),
            vec![TimeInterval::new(0, 1)?; 2],
            Default::default(),
        )?;
        let points2 = MultiPointCollection::from_data(
            MultiPoint::many(vec![(6.0, 6.1), (16.0, 16.1)]).unwrap(),
            vec![TimeInterval::new(1, 2)?; 2],
            Default::default(),
        )?;
        let point_source =
            MockFeatureCollectionSource::multiple(vec![points1.clone(), points2.clone()]).boxed();
        let polygon1 = MultiPolygon::new(vec![vec![vec![
            (0.0, 0.0).into(),
            (10.0, 0.0).into(),
            (10.0, 10.0).into(),
            (0.0, 10.0).into(),
            (0.0, 0.0).into(),
        ]]])?;
        let polygon2 = MultiPolygon::new(vec![vec![vec![
            (10.0, 10.0).into(),
            (20.0, 10.0).into(),
            (20.0, 20.0).into(),
            (10.0, 20.0).into(),
            (10.0, 10.0).into(),
        ]]])?;
        let polygon_source = MockFeatureCollectionSource::multiple(vec![
            MultiPolygonCollection::from_data(
                vec![polygon1.clone()],
                vec![TimeInterval::new(0, 1)?],
                Default::default(),
            )?,
            MultiPolygonCollection::from_data(
                vec![polygon1, polygon2],
                vec![TimeInterval::new(1, 2)?, TimeInterval::new(1, 2)?],
                Default::default(),
            )?,
        ])
        .boxed();
        let operator = PointInPolygonFilter {
            params: PointInPolygonFilterParams {},
            sources: PointInPolygonFilterSource {
                points: point_source,
                polygons: polygon_source,
            },
        }
        .boxed()
        .initialize(&MockExecutionContext::test_default())
        .await?;
        let query_processor = operator.query_processor()?.multi_point().unwrap();
        let query_rectangle = VectorQueryRectangle {
            spatial_bounds: BoundingBox2D::new((0., 0.).into(), (10., 10.).into()).unwrap(),
            time_interval: TimeInterval::default(),
            spatial_resolution: SpatialResolution::zero_point_one(),
        };
        let ctx_one_chunk = MockQueryContext::new(ChunkByteSize::MAX);
        let ctx_minimal_chunks = MockQueryContext::new(ChunkByteSize::MIN);
        let query = query_processor
            .query(query_rectangle, &ctx_minimal_chunks)
            .await
            .unwrap();
        let result = query
            .map(Result::unwrap)
            .collect::<Vec<MultiPointCollection>>()
            .await;
        assert_eq!(result.len(), 2);
        assert_eq!(result[0], points1.filter(vec![true, false])?);
        assert_eq!(result[1], points2);
        // Re-run with one large chunk: the two surviving point batches must be
        // merged into a single output collection.
        // FIX: this statement was garbled in the source; without it
        // `ctx_one_chunk` was unused and the second half of the test dangled.
        let query = query_processor
            .query(query_rectangle, &ctx_one_chunk)
            .await
            .unwrap();
        let result = query
            .map(Result::unwrap)
            .collect::<Vec<MultiPointCollection>>()
            .await;
        assert_eq!(result.len(), 1);
        assert_eq!(
            result[0],
            points1.filter(vec![true, false])?.append(&points2)?
        );
        Ok(())
    }
    // An empty point input must yield an empty output stream, not a panic.
    #[tokio::test]
    async fn empty_points() {
        let point_collection =
            MultiPointCollection::from_data(vec![], vec![], Default::default()).unwrap();
        let polygon_collection = MultiPolygonCollection::from_data(
            vec![MultiPolygon::new(vec![vec![vec![
                (0.0, 0.0).into(),
                (10.0, 0.0).into(),
                (10.0, 10.0).into(),
                (0.0, 10.0).into(),
                (0.0, 0.0).into(),
            ]]])
            .unwrap()],
            vec![TimeInterval::default()],
            Default::default(),
        )
        .unwrap();
        let operator = PointInPolygonFilter {
            params: PointInPolygonFilterParams {},
            sources: PointInPolygonFilterSource {
                points: MockFeatureCollectionSource::single(point_collection).boxed(),
                polygons: MockFeatureCollectionSource::single(polygon_collection).boxed(),
            },
        }
        .boxed()
        .initialize(&MockExecutionContext::test_default())
        .await
        .unwrap();
        let query_rectangle = VectorQueryRectangle {
            spatial_bounds: BoundingBox2D::new((-10., -10.).into(), (10., 10.).into()).unwrap(),
            time_interval: TimeInterval::default(),
            spatial_resolution: SpatialResolution::zero_point_one(),
        };
        let query_processor = operator.query_processor().unwrap().multi_point().unwrap();
        let query_context = MockQueryContext::test_default();
        let query = query_processor
            .query(query_rectangle, &query_context)
            .await
            .unwrap();
        let result = query
            .map(Result::unwrap)
            .collect::<Vec<MultiPointCollection>>()
            .await;
        assert_eq!(result.len(), 0);
    }
}
let query = query_processor
.query(query_rectangle, &ctx_one_chunk) |
enhance.py | import torch
import torch.nn as nn
import torch.nn.functional as F
from horch.common import tuplify
from horch.models.block import mb_conv_block, MBConv
from horch.models.detection.nasfpn import ReLUConvBN
from horch.models.modules import upsample_add, Conv2d, Sequential, Pool2d, upsample_concat
from horch.models.detection.nasfpn import NASFPN
from horch.models.utils import remove_stride_padding
class TopDown(nn.Module):
    """Top-down FPN merge step: project the lateral feature `c` to `f_channels`
    with a 1x1 conv, fuse it with the upsampled coarser feature `p` (add or
    concat), and refine the result with a conv.

    NOTE: the class name was garbled in the source; ``TopDown`` is restored to
    match its use in ``FPN.__init__`` below.

    Args:
        in_channels: channels of the lateral feature `c`.
        f_channels: number of output channels.
        lite: use a depthwise-separable conv (kernel 5) instead of a 3x3 conv.
        aggregate: 'add' to sum the branches, 'cat' to concatenate them.
    """

    def __init__(self, in_channels, f_channels, lite=False, aggregate='add'):
        super().__init__()
        self.aggregate = aggregate
        self.lat = Conv2d(
            in_channels, f_channels, kernel_size=1,
            norm='default')
        # 'cat' doubles the channel count entering the fusion conv.
        channels = f_channels * 2 if aggregate == 'cat' else f_channels
        self.conv = Conv2d(
            channels, f_channels, kernel_size=5 if lite else 3,
            norm='default', act='default', depthwise_separable=lite)

    def forward(self, c, p):
        if self.aggregate == 'cat':
            p = upsample_concat(p, self.lat(c))
        else:
            p = upsample_add(p, self.lat(c))
        p = self.conv(p)
        return p
class DeconvTopDown(nn.Module):
    """Top-down merge step that upsamples `p` with a learned stride-2
    transposed convolution instead of interpolation, then fuses it with the
    1x1 lateral projection of `c` and refines the result.

    Args:
        in_channels1: channels of the lateral (bottom-up) feature `c`.
        in_channels2: channels of the top-down feature `p` before upsampling.
        f_channels: number of output channels.
        lite: use depthwise-separable convolutions (kernel 5 instead of 3
            for the fusion conv).
        aggregate: 'add' to sum the two branches, 'cat' to concatenate them.
    """
    def __init__(self, in_channels1, in_channels2, f_channels, lite=False, aggregate='add'):
        super().__init__()
        self.aggregate = aggregate
        self.lat = Conv2d(
            in_channels1, f_channels, kernel_size=1,
            norm='default')
        # 4x4 stride-2 transposed conv upsamples `p` (nominally by 2x;
        # exact output size depends on the Conv2d wrapper's padding).
        self.deconv = Conv2d(in_channels2, f_channels, kernel_size=4, stride=2,
                             norm='default', depthwise_separable=lite, transposed=True)
        # 'cat' doubles the channel count entering the fusion conv.
        channels = f_channels * 2 if aggregate == 'cat' else f_channels
        self.conv = Conv2d(
            channels, f_channels, kernel_size=5 if lite else 3,
            norm='default', act='default', depthwise_separable=lite)
    def forward(self, c, p):
        if self.aggregate == 'cat':
            p = torch.cat([self.lat(c), self.deconv(p)], dim=1)
        else:
            p = self.lat(c) + self.deconv(p)
        p = self.conv(p)
        return p
class FPNExtraLayers(nn.Module):
    """Produces extra, coarser pyramid levels (e.g. P6, P7) by repeatedly
    downsampling the last input level; `forward` returns one feature map per
    entry in `extra_layers`.

    Args:
        in_channels: channels of the incoming feature map.
        extra_layers: level ids to add; only the count is used here.
        f_channels: output channels of each 'conv'-downsampled level.
        downsample: one of 'conv', 'maxpool', 'avgpool'.
        lite: use a depthwise-separable conv for the 'conv' variant.

    NOTE(review): the pooling variants do not change the channel count, so
    they only make sense when `in_channels` already equals `f_channels` —
    confirm against callers.
    """
    def __init__(self, in_channels, extra_layers=(6, 7), f_channels=None, downsample='conv', lite=False):
        super().__init__()
        self.extra_layers = nn.ModuleList([])
        for _ in extra_layers:
            if downsample == 'conv':
                l = ReLUConvBN(in_channels, f_channels, stride=2, lite=lite)
            elif downsample == 'maxpool':
                l = Pool2d('max', kernel_size=1, stride=2)
            elif downsample == 'avgpool':
                l = Pool2d('avg', kernel_size=1, stride=2)
            else:
                raise ValueError("%s as downsampling is invalid." % downsample)
            # Subsequent levels consume the previous level's output channels.
            in_channels = f_channels
            self.extra_layers.append(l)
    def forward(self, p):
        # Each extra layer consumes the previous (progressively coarser) output.
        ps = []
        for l in self.extra_layers:
            p = l(p)
            ps.append(p)
        return tuple(ps)
class BasicBlock(nn.Sequential):
    """Extra-layer block: a 1x1 conv halving the channels followed by a
    stride-2 3x3 conv producing `out_channels` at half resolution."""
    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.conv1 = Conv2d(in_channels, out_channels // 2, kernel_size=1,
                            norm='default', act='default')
        self.conv2 = Conv2d(out_channels // 2, out_channels, kernel_size=3, stride=2,
                            norm='default', act='default')
class ExtraLayers(nn.Module):
    """Appends `num_extra_layers` downsampling blocks after the backbone
    features; `forward` returns the original feature maps followed by the
    newly created ones.

    Args:
        in_channels_list: channels of the incoming feature maps.
        num_extra_layers: number of blocks to append.
        f_channels_list: output channels per appended block (broadcast to
            `num_extra_layers` entries via `tuplify`).
        no_padding: non-positive count; stride/padding is removed from the
            last `-no_padding` appended blocks (see `remove_stride_padding`).
        block: block constructor called as `block(in_ch, out_ch, **kwargs)`.
    """
    def __init__(self, in_channels_list, num_extra_layers=3, f_channels_list=(512, 256, 256), no_padding=0, block=BasicBlock, **kwargs):
        super().__init__()
        f_channels_list = tuplify(f_channels_list, num_extra_layers)
        in_channels_list = list(in_channels_list)
        self.extra_layers = nn.ModuleList([])
        for f_channels in f_channels_list:
            l = block(in_channels_list[-1], f_channels, **kwargs)
            self.extra_layers.append(l)
            in_channels_list.append(f_channels)
        # `no_padding` is used as a negative index range: iterates over the
        # last -no_padding blocks (no-op when 0).
        for i in range(no_padding, 0):
            remove_stride_padding(self.extra_layers[i])
        # Channels of every returned feature map, originals first.
        self.out_channels = in_channels_list
    def forward(self, *cs):
        ps = list(cs)
        for l in self.extra_layers:
            ps.append(l(ps[-1]))
        return tuple(ps)
class SSDExtraLayers(ExtraLayers):
    """ExtraLayers preset used by SSD: plain BasicBlock downsampling."""

    def __init__(self, in_channels_list, num_extra_layers=3, f_channels_list=(512, 256, 256), no_padding=-1):
        super().__init__(
            in_channels_list,
            num_extra_layers=num_extra_layers,
            f_channels_list=f_channels_list,
            no_padding=no_padding,
            block=BasicBlock,
        )
class SSDLiteExtraLayers(ExtraLayers):
    """ExtraLayers preset used by SSDLite: MBConv (expand_ratio=4) blocks."""

    def __init__(self, in_channels_list, num_extra_layers=3, f_channels_list=(512, 256, 256), no_padding=-1, kernel_size=3):
        super().__init__(
            in_channels_list,
            num_extra_layers=num_extra_layers,
            f_channels_list=f_channels_list,
            no_padding=no_padding,
            block=mb_conv_block,
            expand_ratio=4,
            kernel_size=kernel_size,
        )
class FPN(nn.Module):
    r"""
    Feature Pyramid Network which enhance features of different levels.

    Parameters
    ----------
    in_channels_list : sequence of ints
        Number of input channels of every level, e.g., ``(256,512,1024)``
    f_channels : int
        Number of output channels.
    extra_layers : tuple of ints
        Extra layers to add, e.g., ``(6, 7)``
    lite : bool
        Whether to replace conv3x3 with depthwise seperable conv.
        Default: False
    upsample : str
        Use bilinear upsampling when `interpolate` and ConvTransposed when `deconv`
        Default: `interpolate`
    """
    def __init__(self, in_channels_list, f_channels=256, extra_layers=(), downsample='conv', lite=False,
                 upsample='interpolate', aggregate='add'):
        super().__init__()
        # Lateral 1x1 projection for the coarsest backbone level only; the
        # finer levels are projected inside the top-down blocks.
        self.lat = Conv2d(in_channels_list[-1], f_channels, kernel_size=1, norm='default')
        self.extra_layers = extra_layers
        if extra_layers:
            self.extras = FPNExtraLayers(f_channels, extra_layers, f_channels, downsample=downsample, lite=lite)
        if upsample == 'deconv':
            self.topdowns = nn.ModuleList([
                DeconvTopDown(c, f_channels, f_channels, lite=lite, aggregate=aggregate)
                for c in in_channels_list[:-1]
            ])
        else:
            self.topdowns = nn.ModuleList([
                TopDown(c, f_channels, lite=lite, aggregate=aggregate)
                for c in in_channels_list[:-1]
            ])
        # Every output level has f_channels channels.
        self.out_channels = [f_channels] * (len(in_channels_list) + len(extra_layers))

    def forward(self, *cs):
        # Start from the coarsest level and propagate top-down; ps is kept
        # ordered finest-first (new levels are prepended).
        p = self.lat(cs[-1])
        ps = (p,)
        if self.extra_layers:
            ps = ps + self.extras(p)
        for c, topdown in zip(reversed(cs[:-1]), reversed(self.topdowns)):
            p = topdown(c, ps[0])
            ps = (p,) + ps
        return ps
class BottomUp(nn.Module):
    """One bottom-up aggregation step: downsample ``n``, add ``p``, refine."""

    def __init__(self, f_channels, lite=False):
        super().__init__()
        self.down = Conv2d(
            f_channels, f_channels, kernel_size=3, stride=2,
            norm='default', act='default', depthwise_separable=lite)
        self.conv = Conv2d(
            f_channels, f_channels, kernel_size=3,
            norm='default', act='default', depthwise_separable=lite)

    def forward(self, p, n):
        return self.conv(p + self.down(n))
class FPN2(nn.Module):
    r"""
    Bottom-up path augmentation.

    Parameters
    ----------
    in_channels_list : sequence of ints
        Number of input channels of every level, e.g., ``(256,256,256)``
        Notice: they must be the same.
    f_channels : int
        Number of output channels.
    """

    def __init__(self, in_channels_list, f_channels, lite=False):
        super().__init__()
        assert len(set(in_channels_list)) == 1, "Input channels of every level must be the same"
        assert in_channels_list[0] == f_channels, "Input channels must be the same as `f_channels`"
        self.bottomups = nn.ModuleList(
            [BottomUp(f_channels, lite=lite) for _ in in_channels_list[1:]]
        )
        self.out_channels = [f_channels] * len(in_channels_list)

    def forward(self, *ps):
        # Walk finest-to-coarsest, fusing each level with the previous result.
        ns = [ps[0]]
        for p, bottomup in zip(ps[1:], self.bottomups):
            ns.append(bottomup(p, ns[-1]))
        return tuple(ns)
class ContextEnhance(nn.Module):
    """Fuse multi-scale features plus a global context vector into one map
    at the finest level's resolution."""
    def __init__(self, in_channels, out_channels):
        super().__init__()
        # One 1x1 lateral projection per input level.
        self.lats = nn.ModuleList([
            Conv2d(c, out_channels, kernel_size=1, norm='default')
            for c in in_channels
        ])
        # Separate projection for the globally pooled context of the coarsest level.
        self.lat_glb = Conv2d(in_channels[-1], out_channels, kernel_size=1,
                              norm='default')
    def forward(self, *cs):
        # Target spatial size = that of the finest input (NCHW assumed — the
        # [2:4] slice takes H, W).
        size = cs[0].size()[2:4]
        p = self.lats[0](cs[0])
        # Upsample every coarser level to the finest size and accumulate in place.
        for c, lat in zip(cs[1:], self.lats[1:]):
            p += F.interpolate(lat(c), size=size, mode='bilinear', align_corners=False)
        # Global context: 1x1 spatial average of the coarsest input, broadcast-added.
        c_glb = F.adaptive_avg_pool2d(cs[-1], 1)
        p_glb = self.lat_glb(c_glb)
        p += p_glb
        return p
def stacked_fpn(num_stacked, in_channels_list, extra_layers=(), f_channels=256, lite=False, upsample='interpolate'):
    r"""
    Stacked FPN with alternant top down block and bottom up block.

    Parameters
    ----------
    num_stacked : int
        Number of stacked fpns.
    in_channels_list : sequence of ints
        Number of input channels of every level, e.g., ``(128,256,512)``
    extra_layers : tuple of ints
        Extra layers to add, e.g., ``(6, 7)``
    f_channels : int
        Number of feature (output) channels.
        Default: 256
    lite : bool
        Whether to replace conv3x3 with depthwise seperable conv.
        Default: False
    upsample : str
        Use bilinear upsampling if `interpolate` and ConvTransposed if `deconv`
        Default: `interpolate`
    """
    assert num_stacked >= 2, "Use FPN directly if `num_stacked` is smaller than 2."
    # First stage is always top-down; subsequent stages alternate
    # bottom-up (odd i) and top-down (even i).
    layers = [FPN(in_channels_list, f_channels, extra_layers, lite=lite, upsample=upsample)]
    for i in range(1, num_stacked):
        if i % 2 == 0:
            stage = FPN(layers[-1].out_channels, f_channels, lite=lite, upsample=upsample)
        else:
            stage = FPN2(layers[-1].out_channels, f_channels, lite=lite)
        layers.append(stage)
    m = Sequential(*layers)
    m.out_channels = m[-1].out_channels
    return m
def stacked_nas_fpn(num_stacked, in_channels_list, extra_layers=(), f_channels=256, lite=False, upsample='interpolate'):
    r"""
    Stacked FPN whose first stage is a standard FPN (maxpool downsampling for
    extra levels) followed by ``num_stacked - 1`` NAS-FPN stages.

    Parameters
    ----------
    num_stacked : int
        Total number of stacked stages (first FPN included).
    in_channels_list : sequence of ints
        Number of input channels of every level, e.g., ``(128,256,512)``
    extra_layers : tuple of ints
        Extra layers to add, e.g., ``(6, 7)``
    f_channels : int
        Number of feature (output) channels.
        Default: 256
    lite : bool
        Whether to replace conv3x3 with depthwise seperable conv (first stage only).
        Default: False
    upsample : str
        Use bilinear upsampling if `interpolate` and ConvTransposed if `deconv`
        Default: `interpolate`
    """
    assert num_stacked >= 2, "Use FPN directly if `num_stacked` is smaller than 2."
    layers = [FPN(in_channels_list, f_channels, extra_layers, downsample='maxpool', lite=lite, upsample=upsample)]
    # NASFPN stages keep the channel width; loop index itself is unused.
    for _ in range(num_stacked - 1):
        layers.append(NASFPN(f_channels))
    m = Sequential(*layers)
    m.out_channels = m[-1].out_channels
    return m
class IDA(nn.Module):
    """Iterative Deep Aggregation: repeatedly merges adjacent pyramid levels
    with DeconvTopDown until a single feature map remains.
    """

    def __init__(self, in_channels_list, f_channels, lite=False):
        super().__init__()
        self.num_levels = len(in_channels_list)
        self.topdowns = nn.ModuleList([
            DeconvTopDown(in_channels_list[i], in_channels_list[i + 1], f_channels, lite=lite)
            for i in range(self.num_levels - 1)
        ])
        if self.num_levels > 2:
            # Fix: forward `lite` to the recursive stage — it was previously
            # dropped here (compare IDA2, which does pass it through).
            self.deep = IDA([f_channels] * (self.num_levels - 1), f_channels, lite=lite)

    def forward(self, *xs):
        # Merge each adjacent pair, shrinking the pyramid by one level.
        xs = [
            topdown(xs[i], xs[i + 1]) for i, topdown in enumerate(self.topdowns)
        ]
        if self.num_levels > 2:
            return self.deep(*xs)
        else:
            return xs[0]
class IDA2(nn.Module):
    """Iterative deep aggregation variant where each merge adopts the next
    level's channel count instead of a fixed width."""

    def __init__(self, in_channels, lite=False):
        super().__init__()
        self.num_levels = len(in_channels)
        self.topdowns = nn.ModuleList([
            DeconvTopDown(in_channels[i], in_channels[i + 1], in_channels[i + 1], lite=lite)
            for i in range(self.num_levels - 1)
        ])
        if self.num_levels > 2:
            self.deep = IDA2(in_channels[1:], lite=lite)

    def forward(self, *xs):
        merged = [topdown(xs[i], xs[i + 1]) for i, topdown in enumerate(self.topdowns)]
        if self.num_levels > 2:
            return self.deep(*merged)
        return merged[0]
class YOLOFPN(nn.Module):
    """YOLO-style top-down neck built from MBConv blocks.

    Levels are processed coarsest-first; each finer level receives the
    previous level's intermediate (quarter-width) feature, upsampled and
    concatenated via ``upsample_concat``.
    """
    def __init__(self, in_channels_list, f_channels_list=(256, 512, 1024), kernel_size=5):
        super().__init__()
        assert len(in_channels_list) == len(f_channels_list)
        num_levels = len(in_channels_list)
        self.convs = nn.ModuleList([])
        self.lats = nn.ModuleList([])
        self.outs = nn.ModuleList([])
        for i in range(num_levels):
            # i iterates coarsest-to-finest, hence the reversed indexing.
            f_channels = f_channels_list[-(i+1)]
            in_channels = in_channels_list[-(i+1)]
            if i == 0:
                self.convs.append(nn.Sequential(
                    MBConv(in_channels, in_channels, f_channels // 4, kernel_size=kernel_size),
                    MBConv(f_channels // 4, f_channels, f_channels // 4, kernel_size=kernel_size),
                ))
            else:
                # Lateral 1x1 reduces the previous (coarser) level's quarter-width
                # feature before it is upsampled and concatenated.
                self.lats.append(Conv2d(f_channels_list[-i] // 4, f_channels // 4, kernel_size=1,
                                        norm='default'))
                self.convs.append(nn.Sequential(
                    MBConv(in_channels + f_channels // 4, in_channels + f_channels // 4, f_channels // 4, kernel_size=kernel_size),
                    MBConv(f_channels // 4, f_channels, f_channels // 4, kernel_size=kernel_size),
                ))
            # Per-level output head expanding back to f_channels.
            self.outs.append(MBConv(f_channels // 4, f_channels, None, kernel_size=kernel_size))
        self.out_channels = tuple(f_channels_list)
    def forward(self, *cs):
        ps = []
        # Coarsest level first: p1 is the running quarter-width feature,
        # p2 the full-width per-level output.
        p1 = self.convs[0](cs[-1])
        p2 = self.outs[0](p1)
        ps.append(p2)
        for lat, conv, out, c in zip(self.lats, self.convs[1:], self.outs[1:], reversed(cs[:-1])):
            c = upsample_concat(lat(p1), c)
            p1 = conv(c)
            p2 = out(p1)
            ps.append(p2)
        # Return finest-first to match the input ordering.
        return tuple(reversed(ps))
| TopDown |
dispatchers.go | package dockerfile // import "github.com/docker/docker/builder/dockerfile"
// This file contains the dispatchers for each command. Note that
// `nullDispatch` is not actually a command, but support for commands we parse
// but do nothing with.
//
// See evaluator.go for a higher level discussion of the whole evaluator
// package.
import (
"bytes"
"fmt"
"runtime"
"sort"
"strings"
"github.com/containerd/containerd/platforms"
"github.com/docker/docker/api"
"github.com/docker/docker/api/types/strslice"
"github.com/docker/docker/builder"
"github.com/docker/docker/errdefs"
"github.com/docker/docker/image"
"github.com/docker/docker/pkg/jsonmessage"
"github.com/docker/docker/pkg/signal"
"github.com/docker/docker/pkg/system"
"github.com/docker/go-connections/nat"
"github.com/moby/buildkit/frontend/dockerfile/instructions"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/moby/buildkit/frontend/dockerfile/shell"
specs "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/pkg/errors"
)
// ENV foo bar
//
// Sets the environment variable foo to bar, also makes interpolation
// in the dockerfile available from the next statement on via ${foo}.
//
func dispatchEnv(d dispatchRequest, c *instructions.EnvCommand) error {
	runConfig := d.state.runConfig
	commitMessage := bytes.NewBufferString("ENV")
	for _, kv := range c.Env {
		entry := kv.String()
		commitMessage.WriteString(" " + entry)
		// Replace an existing variable with the same key, else append.
		idx := -1
		for i, existing := range runConfig.Env {
			if shell.EqualEnvKeys(strings.SplitN(existing, "=", 2)[0], kv.Key) {
				idx = i
				break
			}
		}
		if idx >= 0 {
			runConfig.Env[idx] = entry
		} else {
			runConfig.Env = append(runConfig.Env, entry)
		}
	}
	return d.builder.commit(d.state, commitMessage.String())
}
// MAINTAINER some text <[email protected]>
//
// Sets the maintainer metadata. MAINTAINER is deprecated in favor of
// LABEL maintainer=..., but the classic builder still records it.
func dispatchMaintainer(d dispatchRequest, c *instructions.MaintainerCommand) error {
	d.state.maintainer = c.Maintainer
	return d.builder.commit(d.state, "MAINTAINER "+c.Maintainer)
}
// LABEL some json data describing the image
//
// Sets the Label variable foo to bar,
//
func dispatchLabel(d dispatchRequest, c *instructions.LabelCommand) error {
	labels := d.state.runConfig.Labels
	if labels == nil {
		labels = make(map[string]string)
		d.state.runConfig.Labels = labels
	}
	commitStr := "LABEL"
	for _, kv := range c.Labels {
		labels[kv.Key] = kv.Value
		commitStr += " " + kv.String()
	}
	return d.builder.commit(d.state, commitStr)
}
// ADD foo /path
//
// Add the file 'foo' to '/path'. Tarball and Remote URL (http, https) handling
// exist here. If you do not wish to have this automatic handling, use COPY.
//
func dispatchAdd(d dispatchRequest, c *instructions.AddCommand) error {
	// --chmod is BuildKit-only in the classic builder.
	if c.Chmod != "" {
		return errors.New("the --chmod option requires BuildKit. Refer to https://docs.docker.com/go/buildkit/ to learn how to build images with BuildKit enabled")
	}
	// Unlike COPY, ADD may fetch remote URLs, so a real downloader is wired in.
	downloader := newRemoteSourceDownloader(d.builder.Output, d.builder.Stdout)
	copier := copierFromDispatchRequest(d, downloader, nil)
	defer copier.Cleanup()
	copyInstruction, err := copier.createCopyInstruction(c.SourcesAndDest, "ADD")
	if err != nil {
		return err
	}
	copyInstruction.chownStr = c.Chown
	// ADD transparently unpacks local tar archives; COPY does not.
	copyInstruction.allowLocalDecompression = true
	return d.builder.performCopy(d, copyInstruction)
}
// COPY foo /path
//
// Same as 'ADD' but without the tar and remote url handling.
//
func dispatchCopy(d dispatchRequest, c *instructions.CopyCommand) error {
if c.Chmod != "" {
return errors.New("the --chmod option requires BuildKit. Refer to https://docs.docker.com/go/buildkit/ to learn how to build images with BuildKit enabled")
}
var im *imageMount
var err error
if c.From != "" {
im, err = d.getImageMount(c.From)
if err != nil {
return errors.Wrapf(err, "invalid from flag value %s", c.From)
}
} | if err != nil {
return err
}
copyInstruction.chownStr = c.Chown
if c.From != "" && copyInstruction.chownStr == "" {
copyInstruction.preserveOwnership = true
}
return d.builder.performCopy(d, copyInstruction)
}
// getImageMount resolves an image reference or build-stage name to a mounted
// image source. An empty ref yields (nil, nil).
func (d *dispatchRequest) getImageMount(imageRefOrID string) (*imageMount, error) {
	if imageRefOrID == "" {
		// TODO: this could return the source in the default case as well?
		return nil, nil
	}

	stage, err := d.stages.get(imageRefOrID)
	if err != nil {
		return nil, err
	}
	localOnly := false
	if stage != nil {
		// Named build stage: resolve to its image and never pull remotely.
		imageRefOrID = stage.Image
		localOnly = true
	}
	return d.builder.imageSources.Get(imageRefOrID, localOnly, d.builder.platform)
}
// FROM [--platform=platform] imagename[:tag | @digest] [AS build-stage-name]
//
func initializeStage(d dispatchRequest, cmd *instructions.Stage) error {
	d.builder.imageProber.Reset()

	var platform *specs.Platform
	if v := cmd.Platform; v != "" {
		// --platform is subject to build-arg expansion (e.g. $BUILDPLATFORM).
		v, err := d.getExpandedString(d.shlex, v)
		if err != nil {
			return errors.Wrapf(err, "failed to process arguments for platform %s", v)
		}

		p, err := platforms.Parse(v)
		if err != nil {
			return errors.Wrapf(err, "failed to parse platform %s", v)
		}
		if err := system.ValidatePlatform(p); err != nil {
			return err
		}
		platform = &p
	}

	image, err := d.getFromImage(d.shlex, cmd.BaseName, platform)
	if err != nil {
		return err
	}
	state := d.state
	if err := state.beginStage(cmd.Name, image); err != nil {
		return err
	}
	// ONBUILD triggers inherited from the base image fire once at the start of
	// this stage and are cleared so they do not propagate to child images again.
	if len(state.runConfig.OnBuild) > 0 {
		triggers := state.runConfig.OnBuild
		state.runConfig.OnBuild = nil
		return dispatchTriggeredOnBuild(d, triggers)
	}
	return nil
}
// dispatchTriggeredOnBuild re-parses and dispatches the ONBUILD triggers
// inherited from the base image, as if they appeared in the Dockerfile.
func dispatchTriggeredOnBuild(d dispatchRequest, triggers []string) error {
	fmt.Fprintf(d.builder.Stdout, "# Executing %d build trigger", len(triggers))
	if len(triggers) > 1 {
		fmt.Fprint(d.builder.Stdout, "s")
	}
	fmt.Fprintln(d.builder.Stdout)
	for _, trigger := range triggers {
		d.state.updateRunConfig()
		// Each trigger is stored as raw Dockerfile text and must parse to
		// exactly one instruction.
		ast, err := parser.Parse(strings.NewReader(trigger))
		if err != nil {
			return err
		}
		if len(ast.AST.Children) != 1 {
			return errors.New("onbuild trigger should be a single expression")
		}
		cmd, err := instructions.ParseCommand(ast.AST.Children[0])
		if err != nil {
			var uiErr *instructions.UnknownInstruction
			if errors.As(err, &uiErr) {
				buildsFailed.WithValues(metricsUnknownInstructionError).Inc()
			}
			return err
		}
		err = dispatch(d, cmd)
		if err != nil {
			return err
		}
	}
	return nil
}
// getExpandedString expands build-arg references in str using the meta args
// (ARGs declared before the first FROM).
func (d *dispatchRequest) getExpandedString(shlex *shell.Lex, str string) (string, error) {
	meta := d.state.buildArgs.GetAllMeta()
	substitutionArgs := make([]string, 0, len(meta))
	for key, value := range meta {
		substitutionArgs = append(substitutionArgs, key+"="+value)
	}
	name, err := shlex.ProcessWord(str, substitutionArgs)
	if err != nil {
		return "", err
	}
	return name, nil
}
// getImageOrStage resolves a FROM target: a prior build-stage name, the
// special "scratch" base, or an image reference to pull/look up.
func (d *dispatchRequest) getImageOrStage(name string, platform *specs.Platform) (builder.Image, error) {
	var localOnly bool
	if im, ok := d.stages.getByName(name); ok {
		// FROM <stage-name>: use the stage's image and never pull remotely.
		name = im.Image
		localOnly = true
	}
	if platform == nil {
		platform = d.builder.platform
	}

	// Windows cannot support a container with no base image unless it is LCOW.
	if name == api.NoBaseImageSpecifier {
		p := platforms.DefaultSpec()
		if platform != nil {
			p = *platform
		}
		imageImage := &image.Image{}
		imageImage.OS = p.OS

		// old windows scratch handling
		// TODO: scratch should not have an os. It should be nil image.
		// Windows supports scratch. What is not supported is running containers
		// from it.
		if runtime.GOOS == "windows" {
			if platform == nil || platform.OS == "linux" {
				if !system.LCOWSupported() {
					return nil, errors.New("Linux containers are not supported on this system")
				}
				imageImage.OS = "linux"
			} else if platform.OS == "windows" {
				return nil, errors.New("Windows does not support FROM scratch")
			} else {
				return nil, errors.Errorf("platform %s is not supported", platforms.Format(p))
			}
		}
		return builder.Image(imageImage), nil
	}
	imageMount, err := d.builder.imageSources.Get(name, localOnly, platform)
	if err != nil {
		return nil, err
	}
	return imageMount.Image(), nil
}
// getFromImage expands build args in basename and resolves the result to a
// base image or build stage.
func (d *dispatchRequest) getFromImage(shlex *shell.Lex, basename string, platform *specs.Platform) (builder.Image, error) {
	name, err := d.getExpandedString(shlex, basename)
	if err != nil {
		return nil, err
	}
	// Empty string is interpreted to FROM scratch by images.GetImageAndReleasableLayer,
	// so validate expanded result is not empty.
	if name == "" {
		return nil, errors.Errorf("base name (%s) should not be blank", basename)
	}

	return d.getImageOrStage(name, platform)
}
// ONBUILD RUN echo yo
//
// Registers a trigger that fires when this image is used as a base in a
// later build (see dispatchTriggeredOnBuild).
func dispatchOnbuild(d dispatchRequest, c *instructions.OnbuildCommand) error {
	d.state.runConfig.OnBuild = append(d.state.runConfig.OnBuild, c.Expression)
	return d.builder.commit(d.state, "ONBUILD "+c.Expression)
}
// WORKDIR /tmp
//
// Set the working directory for future RUN/CMD/etc statements.
//
func dispatchWorkdir(d dispatchRequest, c *instructions.WorkdirCommand) error {
	runConfig := d.state.runConfig
	var err error
	// Normalization is OS-dependent (path separators, relative joins).
	runConfig.WorkingDir, err = normalizeWorkdir(d.state.operatingSystem, runConfig.WorkingDir, c.Path)
	if err != nil {
		return err
	}

	// For performance reasons, we explicitly do a create/mkdir now
	// This avoids having an unnecessary expensive mount/unmount calls
	// (on Windows in particular) during each container create.
	// Prior to 1.13, the mkdir was deferred and not executed at this step.
	if d.builder.disableCommit {
		// Don't call back into the daemon if we're going through docker commit --change "WORKDIR /foo".
		// We've already updated the runConfig and that's enough.
		return nil
	}

	comment := "WORKDIR " + runConfig.WorkingDir
	runConfigWithCommentCmd := copyRunConfig(runConfig, withCmdCommentString(comment, d.state.operatingSystem))
	// probeAndCreate returns an empty id on cache hit; nothing to mkdir then.
	containerID, err := d.builder.probeAndCreate(d.state, runConfigWithCommentCmd)
	if err != nil || containerID == "" {
		return err
	}

	if err := d.builder.docker.ContainerCreateWorkdir(containerID); err != nil {
		return err
	}

	return d.builder.commitContainer(d.state, containerID, runConfigWithCommentCmd)
}
// RUN some command yo
//
// run a command and commit the image. Args are automatically prepended with
// the current SHELL which defaults to 'sh -c' under linux or 'cmd /S /C' under
// Windows, in the event there is only one argument The difference in processing:
//
// RUN echo hi          # sh -c echo hi       (Linux and LCOW)
// RUN echo hi          # cmd /S /C echo hi   (Windows)
// RUN [ "echo", "hi" ] # echo hi
//
func dispatchRun(d dispatchRequest, c *instructions.RunCommand) error {
	if !system.IsOSSupported(d.state.operatingSystem) {
		return system.ErrNotSupportedOperatingSystem
	}

	if len(c.FlagsUsed) > 0 {
		// classic builder RUN currently does not support any flags, so fail on the first one
		return errors.Errorf("the --%s option requires BuildKit. Refer to https://docs.docker.com/go/buildkit/ to learn how to build images with BuildKit enabled", c.FlagsUsed[0])
	}

	stateRunConfig := d.state.runConfig
	cmdFromArgs, argsEscaped := resolveCmdLine(c.ShellDependantCmdLine, stateRunConfig, d.state.operatingSystem, c.Name(), c.String())
	buildArgs := d.state.buildArgs.FilterAllowed(stateRunConfig.Env)

	// The cache-probe command embeds the referenced build args (see
	// prependEnvOnCmd) so that changing an ARG value busts the cache.
	saveCmd := cmdFromArgs
	if len(buildArgs) > 0 {
		saveCmd = prependEnvOnCmd(d.state.buildArgs, buildArgs, cmdFromArgs)
	}

	runConfigForCacheProbe := copyRunConfig(stateRunConfig,
		withCmd(saveCmd),
		withArgsEscaped(argsEscaped),
		withEntrypointOverride(saveCmd, nil))
	if hit, err := d.builder.probeCache(d.state, runConfigForCacheProbe); err != nil || hit {
		return err
	}

	// The actually-executed config gets the real cmd, the build-arg env vars,
	// and a cleared entrypoint/healthcheck.
	runConfig := copyRunConfig(stateRunConfig,
		withCmd(cmdFromArgs),
		withArgsEscaped(argsEscaped),
		withEnv(append(stateRunConfig.Env, buildArgs...)),
		withEntrypointOverride(saveCmd, strslice.StrSlice{""}),
		withoutHealthcheck())

	cID, err := d.builder.create(runConfig)
	if err != nil {
		return err
	}

	if err := d.builder.containerManager.Run(d.builder.clientCtx, cID, d.builder.Stdout, d.builder.Stderr); err != nil {
		if err, ok := err.(*statusCodeError); ok {
			// TODO: change error type, because jsonmessage.JSONError assumes HTTP
			msg := fmt.Sprintf(
				"The command '%s' returned a non-zero code: %d",
				strings.Join(runConfig.Cmd, " "), err.StatusCode())
			if err.Error() != "" {
				msg = fmt.Sprintf("%s: %s", msg, err.Error())
			}
			return &jsonmessage.JSONError{
				Message: msg,
				Code:    err.StatusCode(),
			}
		}
		return err
	}

	// Don't persist the argsEscaped value in the committed image. Use the original
	// from previous build steps (only CMD and ENTRYPOINT persist this).
	if d.state.operatingSystem == "windows" {
		runConfigForCacheProbe.ArgsEscaped = stateRunConfig.ArgsEscaped
	}

	return d.builder.commitContainer(d.state, cID, runConfigForCacheProbe)
}
// Derive the command to use for probeCache() and to commit in this container.
// Note that we only do this if there are any build-time env vars. Also, we
// use the special argument "|#" at the start of the args array. This will
// avoid conflicts with any RUN command since commands can not
// start with | (vertical bar). The "#" (number of build envs) is there to
// help ensure proper cache matches. We don't want a RUN command
// that starts with "foo=abc" to be considered part of a build-time env var.
//
// remove any unreferenced built-in args from the environment variables.
// These args are transparent so resulting image should be the same regardless
// of the value.
func prependEnvOnCmd(buildArgs *BuildArgs, buildArgVars []string, cmd strslice.StrSlice) strslice.StrSlice {
	var referenced []string
	for _, env := range buildArgVars {
		key := strings.SplitN(env, "=", 2)[0]
		if buildArgs.IsReferencedOrNotBuiltin(key) {
			referenced = append(referenced, env)
		}
	}
	// Sorted so the cache key is independent of map iteration order.
	sort.Strings(referenced)
	prefix := append([]string{fmt.Sprintf("|%d", len(referenced))}, referenced...)
	return strslice.StrSlice(append(prefix, cmd...))
}
// CMD foo
//
// Set the default command to run in the container (which may be empty).
// Argument handling is the same as RUN.
//
func dispatchCmd(d dispatchRequest, c *instructions.CmdCommand) error {
	runConfig := d.state.runConfig
	cmd, argsEscaped := resolveCmdLine(c.ShellDependantCmdLine, runConfig, d.state.operatingSystem, c.Name(), c.String())

	// We warn here as Windows shell processing operates differently to Linux.
	// Linux:   /bin/sh -c "echo hello" world	--> hello
	// Windows: cmd /s /c "echo hello" world	--> hello world
	if d.state.operatingSystem == "windows" &&
		len(runConfig.Entrypoint) > 0 &&
		d.state.runConfig.ArgsEscaped != argsEscaped {
		fmt.Fprintf(d.builder.Stderr, " ---> [Warning] Shell-form ENTRYPOINT and exec-form CMD may have unexpected results\n")
	}

	runConfig.Cmd = cmd
	runConfig.ArgsEscaped = argsEscaped

	if err := d.builder.commit(d.state, fmt.Sprintf("CMD %q", cmd)); err != nil {
		return err
	}
	// Remember that CMD was set explicitly: dispatchEntrypoint only clears
	// Cmd when it was NOT explicitly set by the Dockerfile.
	if len(c.ShellDependantCmdLine.CmdLine) != 0 {
		d.state.cmdSet = true
	}

	return nil
}
// HEALTHCHECK foo
//
// Set the default healthcheck command to run in the container (which may be empty).
// Argument handling is the same as RUN.
//
func dispatchHealthcheck(d dispatchRequest, c *instructions.HealthCheckCommand) error {
	runConfig := d.state.runConfig
	if hc := runConfig.Healthcheck; hc != nil {
		// Warn when replacing an inherited healthcheck (unless it was NONE).
		if len(hc.Test) > 0 && hc.Test[0] != "NONE" {
			fmt.Fprintf(d.builder.Stdout, "Note: overriding previous HEALTHCHECK: %v\n", hc.Test)
		}
	}
	runConfig.Healthcheck = c.Health
	return d.builder.commit(d.state, fmt.Sprintf("HEALTHCHECK %q", runConfig.Healthcheck))
}
// ENTRYPOINT /usr/sbin/nginx
//
// Set the entrypoint to /usr/sbin/nginx. Will accept the CMD as the arguments
// to /usr/sbin/nginx. Uses the default shell if not in JSON format.
//
// Handles command processing similar to CMD and RUN, only req.runConfig.Entrypoint
// is initialized at newBuilder time instead of through argument parsing.
//
func dispatchEntrypoint(d dispatchRequest, c *instructions.EntrypointCommand) error {
	runConfig := d.state.runConfig
	cmd, argsEscaped := resolveCmdLine(c.ShellDependantCmdLine, runConfig, d.state.operatingSystem, c.Name(), c.String())

	// This warning is a little more complex than in dispatchCmd(), as the Windows base images (similar
	// universally to almost every Linux image out there) have a single .Cmd field populated so that
	// `docker run --rm image` starts the default shell which would typically be sh on Linux,
	// or cmd on Windows. The catch to this is that if a dockerfile had `CMD ["c:\\windows\\system32\\cmd.exe"]`,
	// we wouldn't be able to tell the difference. However, that would be highly unlikely, and besides, this
	// is only trying to give a helpful warning of possibly unexpected results.
	if d.state.operatingSystem == "windows" &&
		d.state.runConfig.ArgsEscaped != argsEscaped &&
		((len(runConfig.Cmd) == 1 && strings.ToLower(runConfig.Cmd[0]) != `c:\windows\system32\cmd.exe` && len(runConfig.Shell) == 0) || (len(runConfig.Cmd) > 1)) {
		fmt.Fprintf(d.builder.Stderr, " ---> [Warning] Shell-form CMD and exec-form ENTRYPOINT may have unexpected results\n")
	}

	runConfig.Entrypoint = cmd
	runConfig.ArgsEscaped = argsEscaped
	// A new ENTRYPOINT resets any inherited CMD, unless CMD was explicitly
	// set in this Dockerfile (tracked by d.state.cmdSet in dispatchCmd).
	if !d.state.cmdSet {
		runConfig.Cmd = nil
	}

	return d.builder.commit(d.state, fmt.Sprintf("ENTRYPOINT %q", runConfig.Entrypoint))
}
// EXPOSE 6667/tcp 7000/tcp
//
// Expose ports for links and port mappings. This all ends up in
// req.runConfig.ExposedPorts for runconfig.
//
func dispatchExpose(d dispatchRequest, c *instructions.ExposeCommand, envs []string) error {
	// custom multi word expansion
	// expose $FOO with FOO="80 443" is expanded as EXPOSE [80,443]. This is the only command supporting word to words expansion
	// so the word processing has been de-generalized
	ports := []string{}
	for _, p := range c.Ports {
		expanded, err := d.shlex.ProcessWords(p, envs)
		if err != nil {
			return err
		}
		ports = append(ports, expanded...)
	}
	c.Ports = ports

	portSpecs, _, err := nat.ParsePortSpecs(ports)
	if err != nil {
		return err
	}

	if d.state.runConfig.ExposedPorts == nil {
		d.state.runConfig.ExposedPorts = make(nat.PortSet)
	}
	for p := range portSpecs {
		d.state.runConfig.ExposedPorts[p] = struct{}{}
	}

	return d.builder.commit(d.state, "EXPOSE "+strings.Join(c.Ports, " "))
}
// USER foo
//
// Set the user to 'foo' for future commands and when running the
// ENTRYPOINT/CMD at container run time.
//
func dispatchUser(d dispatchRequest, c *instructions.UserCommand) error {
	d.state.runConfig.User = c.User
	return d.builder.commit(d.state, fmt.Sprintf("USER %v", c.User))
}
// VOLUME /foo
//
// Expose the volume /foo for use. Will also accept the JSON array form.
//
func dispatchVolume(d dispatchRequest, c *instructions.VolumeCommand) error {
	volumes := d.state.runConfig.Volumes
	if volumes == nil {
		volumes = map[string]struct{}{}
		d.state.runConfig.Volumes = volumes
	}
	for _, v := range c.Volumes {
		if v == "" {
			return errors.New("VOLUME specified can not be an empty string")
		}
		volumes[v] = struct{}{}
	}
	return d.builder.commit(d.state, fmt.Sprintf("VOLUME %v", c.Volumes))
}
// STOPSIGNAL signal
//
// Set the signal that will be used to kill the container.
func dispatchStopSignal(d dispatchRequest, c *instructions.StopSignalCommand) error {
	// Validate eagerly so a bad signal name fails the build, not the later stop.
	if _, err := signal.ParseSignal(c.Signal); err != nil {
		return errdefs.InvalidParameter(err)
	}
	d.state.runConfig.StopSignal = c.Signal
	return d.builder.commit(d.state, fmt.Sprintf("STOPSIGNAL %v", c.Signal))
}
// ARG name[=value]
//
// Adds the variable foo to the trusted list of variables that can be passed
// to builder using the --build-arg flag for expansion/substitution or passing to 'run'.
// Dockerfile author may optionally set a default value of this variable.
func dispatchArg(d dispatchRequest, c *instructions.ArgCommand) error {
	parts := make([]string, 0, len(c.Args))
	for _, arg := range c.Args {
		part := arg.Key
		if arg.Value != nil {
			part += "=" + *arg.Value
		}
		parts = append(parts, part)
		d.state.buildArgs.AddArg(arg.Key, arg.Value)
	}
	return d.builder.commit(d.state, "ARG "+strings.Join(parts, " "))
}
// SHELL powershell -command
//
// Set the non-default shell to use.
func dispatchShell(d dispatchRequest, c *instructions.ShellCommand) error {
d.state.runConfig.Shell = c.Shell
return d.builder.commit(d.state, fmt.Sprintf("SHELL %v", d.state.runConfig.Shell))
} | copier := copierFromDispatchRequest(d, errOnSourceDownload, im)
defer copier.Cleanup()
copyInstruction, err := copier.createCopyInstruction(c.SourcesAndDest, "COPY") |
table.go | package orm
import (
"database/sql"
"encoding/json"
"fmt"
"net"
"reflect"
"strings"
"time"
"github.com/go-pg/pg/internal"
"github.com/go-pg/pg/internal/iszero"
"github.com/go-pg/pg/internal/tag"
"github.com/go-pg/pg/types"
)
const (
	// Bit flags stored in Table.flags. The hook flags record which model
	// hooks the struct's pointer type implements (set in newTable).
	AfterQueryHookFlag = uint16(1) << iota
	BeforeSelectQueryHookFlag
	AfterSelectHookFlag
	BeforeInsertHookFlag
	AfterInsertHookFlag
	BeforeUpdateHookFlag
	AfterUpdateHookFlag
	BeforeDeleteHookFlag
	AfterDeleteHookFlag
	discardUnknownColumns
)

// Cached reflect.Types used for type comparisons when mapping struct
// fields to SQL column behavior.
var timeType = reflect.TypeOf((*time.Time)(nil)).Elem()
var nullTimeType = reflect.TypeOf((*types.NullTime)(nil)).Elem()
var ipType = reflect.TypeOf((*net.IP)(nil)).Elem()
var ipNetType = reflect.TypeOf((*net.IPNet)(nil)).Elem()
var scannerType = reflect.TypeOf((*sql.Scanner)(nil)).Elem()
var nullBoolType = reflect.TypeOf((*sql.NullBool)(nil)).Elem()
var nullFloatType = reflect.TypeOf((*sql.NullFloat64)(nil)).Elem()
var nullIntType = reflect.TypeOf((*sql.NullInt64)(nil)).Elem()
var nullStringType = reflect.TypeOf((*sql.NullString)(nil)).Elem()
var jsonRawMessageType = reflect.TypeOf((*json.RawMessage)(nil)).Elem()
// Table represents a SQL table created from Go struct.
type Table struct {
	Type       reflect.Type  // the mapped struct type
	zeroStruct reflect.Value // zero value of Type, used for comparisons

	TypeName  string // exported Go type name
	Alias     types.Q
	ModelName string // underscored type name

	Name               string
	FullName           types.Q
	FullNameForSelects types.Q

	allFields     []*Field // read only
	skippedFields []*Field

	Fields     []*Field // PKs + DataFields
	PKs        []*Field
	DataFields []*Field
	FieldsMap  map[string]*Field

	Methods   map[string]*Method
	Relations map[string]*Relation
	Unique    map[string][]*Field

	SoftDeleteField *Field

	flags uint16 // hook/behavior bit flags, see the const block above
}
// setName sets the table's qualified name; the alias defaults to the
// name when not set explicitly.
func (t *Table) setName(name types.Q) {
	t.FullName = name
	t.FullNameForSelects = name
	if t.Alias == "" {
		t.Alias = name
	}
}
// newTable builds the Table metadata for a struct type: names, alias,
// implemented hook flags, fields and methods.
func newTable(typ reflect.Type) *Table {
	t := new(Table)
	t.Type = typ
	t.zeroStruct = reflect.New(t.Type).Elem()
	t.TypeName = internal.ToExported(t.Type.Name())
	t.ModelName = internal.Underscore(t.Type.Name())
	t.Name = tableNameInflector(t.ModelName)
	t.setName(types.Q(types.AppendField(nil, t.Name, 1)))
	t.Alias = types.Q(types.AppendField(nil, t.ModelName, 1))

	// Hooks are declared on the pointer type. Table-driven instead of nine
	// near-identical if blocks.
	typ = reflect.PtrTo(t.Type)
	hooks := []struct {
		typ  reflect.Type
		flag uint16
	}{
		{afterQueryHookType, AfterQueryHookFlag},
		{beforeSelectQueryHookType, BeforeSelectQueryHookFlag},
		{afterSelectHookType, AfterSelectHookFlag},
		{beforeInsertHookType, BeforeInsertHookFlag},
		{afterInsertHookType, AfterInsertHookFlag},
		{beforeUpdateHookType, BeforeUpdateHookFlag},
		{afterUpdateHookType, AfterUpdateHookFlag},
		{beforeDeleteHookType, BeforeDeleteHookFlag},
		{afterDeleteHookType, AfterDeleteHookFlag},
	}
	for _, h := range hooks {
		if typ.Implements(h.typ) {
			t.SetFlag(h.flag)
		}
	}

	t.initFields()
	t.initMethods()
	return t
}
// String implements fmt.Stringer for use in error messages.
func (t *Table) String() string {
	return "model=" + t.TypeName
}
// SetFlag sets the given bit flag(s) on the table.
func (t *Table) SetFlag(flag uint16) {
	t.flags |= flag
}
// HasFlag reports whether the flag is set; safe to call on a nil table.
func (t *Table) HasFlag(flag uint16) bool {
	return t != nil && t.flags&flag != 0
}
// HasField reports whether a column with the given name is mapped.
func (t *Table) HasField(field string) bool {
	if _, err := t.GetField(field); err != nil {
		return false
	}
	return true
}
// checkPKs returns an error when the model defines no primary keys.
func (t *Table) checkPKs() error {
	if len(t.PKs) > 0 {
		return nil
	}
	return fmt.Errorf("pg: %s does not have primary keys", t)
}
// mustSoftDelete returns an error when the model has no soft-delete field.
func (t *Table) mustSoftDelete() error {
	if t.SoftDeleteField != nil {
		return nil
	}
	return fmt.Errorf("pg: %s does not support soft deletes", t)
}
// AddField registers a field in Fields, the PK/data split, and FieldsMap.
func (t *Table) AddField(field *Field) {
	t.Fields = append(t.Fields, field)
	switch {
	case field.HasFlag(PrimaryKeyFlag):
		t.PKs = append(t.PKs, field)
	default:
		t.DataFields = append(t.DataFields, field)
	}
	t.FieldsMap[field.SQLName] = field
}
func (t *Table) RemoveField(field *Field) {
t.Fields = removeField(t.Fields, field)
if field.HasFlag(PrimaryKeyFlag) {
t.PKs = removeField(t.PKs, field)
} else {
t.DataFields = removeField(t.DataFields, field)
}
delete(t.FieldsMap, field.SQLName)
}
func | (fields []*Field, field *Field) []*Field {
for i, f := range fields {
if f == field {
fields = append(fields[:i], fields[i+1:]...)
}
}
return fields
}
// GetField looks up a column by its SQL name.
func (t *Table) GetField(fieldName string) (*Field, error) {
	field, ok := t.FieldsMap[fieldName]
	if !ok {
		return nil, fmt.Errorf("can't find column=%s in %s", fieldName, t)
	}
	return field, nil
}

// AppendParam appends the value of the named column or zero-arg method of
// strct to b. It reports whether the name was resolved; fields take
// precedence over methods.
func (t *Table) AppendParam(b []byte, strct reflect.Value, name string) ([]byte, bool) {
	field, ok := t.FieldsMap[name]
	if ok {
		b = field.AppendValue(b, strct, 1)
		return b, true
	}
	method, ok := t.Methods[name]
	if ok {
		// Methods are defined on the pointer type (see initMethods).
		b = method.AppendValue(b, strct.Addr(), 1)
		return b, true
	}
	return b, false
}

// initFields discovers the struct's columns and populates Fields/FieldsMap.
func (t *Table) initFields() {
	t.Fields = make([]*Field, 0, t.Type.NumField())
	t.FieldsMap = make(map[string]*Field, t.Type.NumField())
	t.addFields(t.Type, nil)
}
// addFields registers a column for every struct field of typ, recursing
// into anonymous (embedded) struct fields. baseIndex is the reflect index
// path from the root struct down to typ.
func (t *Table) addFields(typ reflect.Type, baseIndex []int) {
	for i := 0; i < typ.NumField(); i++ {
		f := typ.Field(i)
		// Make a copy so slice is not shared between fields.
		index := make([]int, len(baseIndex))
		copy(index, baseIndex)
		if f.Anonymous {
			sqlTag := f.Tag.Get("sql")
			if sqlTag == "-" {
				// Embedded struct explicitly excluded.
				continue
			}
			fieldType := indirectType(f.Type)
			t.addFields(fieldType, append(index, f.Index...))
			pgTag := tag.Parse(f.Tag.Get("pg"))
			_, inherit := pgTag.Options["inherit"]
			_, override := pgTag.Options["override"]
			if inherit || override {
				// inherit/override: the embedded struct lends its
				// identity (names and alias) to this table.
				embeddedTable := newTable(fieldType)
				t.TypeName = embeddedTable.TypeName
				t.FullName = embeddedTable.FullName
				t.FullNameForSelects = embeddedTable.FullNameForSelects
				t.Alias = embeddedTable.Alias
				t.ModelName = embeddedTable.ModelName
			}
			continue
		}
		field := t.newField(f, index)
		if field != nil {
			t.AddField(field)
		}
	}
}
// newField builds the Field metadata for struct field f at reflect index
// path index, honoring the `sql` and `pg` struct tags. It returns nil for
// fields that do not map to a column: the tableName marker field,
// unexported fields, and `sql:"-"` fields (the latter are still recorded
// in allFields/FieldsMap so they can participate in struct inlining).
func (t *Table) newField(f reflect.StructField, index []int) *Field {
	sqlTag := tag.Parse(f.Tag.Get("sql"))
	switch f.Name {
	case "tableName", "TableName":
		// Marker field: configures the table itself, never a column.
		if len(index) > 0 {
			// Only honored on the root struct, not embedded ones.
			return nil
		}
		if sqlTag.Name == "_" {
			t.setName("")
		} else if sqlTag.Name != "" {
			s, _ := tag.Unquote(sqlTag.Name)
			t.setName(types.Q(quoteTableName(s)))
		}
		if v, ok := sqlTag.Options["select"]; ok {
			v, _ = tag.Unquote(v)
			t.FullNameForSelects = types.Q(quoteTableName(v))
		}
		if v, ok := sqlTag.Options["alias"]; ok {
			v, _ = tag.Unquote(v)
			t.Alias = types.Q(quoteTableName(v))
		}
		pgTag := tag.Parse(f.Tag.Get("pg"))
		if _, ok := pgTag.Options["discard_unknown_columns"]; ok {
			t.SetFlag(discardUnknownColumns)
		}
		return nil
	}
	if f.PkgPath != "" {
		// Unexported field.
		return nil
	}
	skip := sqlTag.Name == "-"
	if skip || sqlTag.Name == "" {
		// Default column name is the underscored Go field name.
		sqlTag.Name = internal.Underscore(f.Name)
	}
	index = append(index, f.Index...)
	if field, ok := t.FieldsMap[sqlTag.Name]; ok {
		if indexEqual(field.Index, index) {
			return field
		}
		// Same column name at a different index: new field shadows it.
		t.RemoveField(field)
	}
	field := &Field{
		Field: f,
		Type: indirectType(f.Type),
		GoName: f.Name,
		SQLName: sqlTag.Name,
		Column: types.Q(types.AppendField(nil, sqlTag.Name, 1)),
		Index: index,
	}
	if _, ok := sqlTag.Options["notnull"]; ok {
		field.SetFlag(NotNullFlag)
	}
	if v, ok := sqlTag.Options["unique"]; ok {
		if v == "" {
			field.SetFlag(UniqueFlag)
		} else {
			// Named unique constraint, possibly spanning fields.
			if t.Unique == nil {
				t.Unique = make(map[string][]*Field)
			}
			t.Unique[v] = append(t.Unique[v], field)
		}
	}
	if v, ok := sqlTag.Options["default"]; ok {
		v, ok = tag.Unquote(v)
		if ok {
			// Quoted tag value becomes an SQL string literal.
			field.Default = types.Q(types.AppendString(nil, v, 1))
		} else {
			field.Default = types.Q(v)
		}
	}
	// Key detection: explicit `pk` option first, then naming conventions
	// (_id/_uuid suffix and fk_ prefix mark FKs; id/uuid/pk_<model> mark
	// the PK when none was found yet).
	if _, ok := sqlTag.Options["pk"]; ok {
		field.SetFlag(PrimaryKeyFlag)
	} else if strings.HasSuffix(field.SQLName, "_id") ||
		strings.HasSuffix(field.SQLName, "_uuid") {
		field.SetFlag(ForeignKeyFlag)
	} else if strings.HasPrefix(field.SQLName, "fk_") {
		field.SetFlag(ForeignKeyFlag)
	} else if len(t.PKs) == 0 {
		if field.SQLName == "id" ||
			field.SQLName == "uuid" ||
			field.SQLName == "pk_"+t.ModelName {
			field.SetFlag(PrimaryKeyFlag)
		}
	}
	pgTag := tag.Parse(f.Tag.Get("pg"))
	if _, ok := sqlTag.Options["array"]; ok {
		field.SetFlag(ArrayFlag)
	} else if _, ok := pgTag.Options["array"]; ok {
		field.SetFlag(ArrayFlag)
	}
	field.SQLType = fieldSQLType(field, pgTag, sqlTag)
	if strings.HasSuffix(field.SQLType, "[]") {
		field.SetFlag(ArrayFlag)
	}
	if v, ok := sqlTag.Options["on_delete"]; ok {
		field.OnDelete = v
	}
	// Select append/scan implementations based on the column kind.
	if v, ok := sqlTag.Options["composite"]; ok {
		field.SQLType = v
		field.append = compositeAppender(f.Type)
		field.scan = compositeScanner(f.Type)
	} else if _, ok := pgTag.Options["json_use_number"]; ok {
		field.append = types.Appender(f.Type)
		field.scan = scanJSONValue
	} else if field.HasFlag(ArrayFlag) {
		field.append = types.ArrayAppender(f.Type)
		field.scan = types.ArrayScanner(f.Type)
	} else if _, ok := sqlTag.Options["hstore"]; ok {
		field.append = types.HstoreAppender(f.Type)
		field.scan = types.HstoreScanner(f.Type)
	} else if _, ok := pgTag.Options["hstore"]; ok {
		field.append = types.HstoreAppender(f.Type)
		field.scan = types.HstoreScanner(f.Type)
	} else {
		field.append = types.Appender(f.Type)
		field.scan = types.Scanner(f.Type)
	}
	field.isZero = iszero.Checker(f.Type)
	t.allFields = append(t.allFields, field)
	if skip {
		// `sql:"-"`: not a column, but kept for struct inlining.
		t.skippedFields = append(t.skippedFields, field)
		t.FieldsMap[field.SQLName] = field
		return nil
	}
	if _, ok := pgTag.Options["soft_delete"]; ok {
		switch field.Type {
		case timeType, nullTimeType:
			t.SoftDeleteField = field
		default:
			err := fmt.Errorf(
				"soft_delete is only supported for time.Time and pg.NullTime")
			panic(err)
		}
	}
	return field
}
// initMethods indexes the exported zero-argument, single-result methods of
// the pointer type; these can be referenced as template params (see
// AppendParam).
func (t *Table) initMethods() {
	t.Methods = make(map[string]*Method)
	typ := reflect.PtrTo(t.Type)
	for i := 0; i < typ.NumMethod(); i++ {
		m := typ.Method(i)
		if m.PkgPath != "" {
			// Unexported method.
			continue
		}
		if m.Type.NumIn() > 1 {
			// NumIn counts the receiver, so >1 means it takes args.
			continue
		}
		if m.Type.NumOut() != 1 {
			continue
		}
		retType := m.Type.Out(0)
		t.Methods[m.Name] = &Method{
			Index: m.Index,
			appender: types.Appender(retType),
		}
	}
}
// init runs the second initialization phase, after all fields exist:
// relation discovery, then inlining of skipped struct fields.
func (t *Table) init() {
	t.initRelations()
	t.initInlines()
}

// initRelations converts struct/slice fields into relations. Fields that
// became relations are removed from the column lists; the manual index
// keeps iteration correct while Fields shrinks.
func (t *Table) initRelations() {
	for i := 0; i < len(t.Fields); {
		f := t.Fields[i]
		if t.tryRelation(f) {
			t.Fields = removeField(t.Fields, f)
			t.DataFields = removeField(t.DataFields, f)
		} else {
			i++
		}
	}
}

// initInlines exposes columns of `sql:"-"` struct fields under a
// <field>__<column> name (see inlineFields).
func (t *Table) initInlines() {
	for _, f := range t.skippedFields {
		if f.Type.Kind() == reflect.Struct {
			joinTable := _tables.get(f.Type, true)
			t.inlineFields(f, joinTable, nil)
		}
	}
}
// tryRelation dispatches relation discovery by field kind: slices may be
// has-many/many2many, structs may be has-one/belongs-to. Column-like types
// (sql.Scanner implementors) are never relations. Reports whether a
// relation was registered.
func (t *Table) tryRelation(field *Field) bool {
	if isColumn(field.Type) {
		return false
	}
	switch field.Type.Kind() {
	case reflect.Slice:
		return t.tryRelationSlice(field)
	case reflect.Struct:
		return t.tryRelationStruct(field)
	}
	return false
}
// tryRelationSlice registers a many2many or has-many relation for a slice
// field whose element type is a struct. Reports whether a relation was
// added. Configuration comes from the `pg` tag: fk, many2many, joinFK,
// polymorphic, fk_value.
func (t *Table) tryRelationSlice(field *Field) bool {
	elemType := indirectType(field.Type.Elem())
	if elemType.Kind() != reflect.Struct {
		return false
	}
	pgTag := tag.Parse(field.Field.Tag.Get("pg"))
	joinTable := _tables.get(elemType, true)
	fk, fkOK := pgTag.Options["fk"]
	if fkOK {
		if fk == "-" {
			// Relation explicitly disabled.
			return false
		}
		fk = tryUnderscorePrefix(fk)
	}
	if m2mTableName, _ := pgTag.Options["many2many"]; m2mTableName != "" {
		// many2many: resolve the intermediary table and the FK column
		// names on both sides of it.
		m2mTable := _tables.getByName(m2mTableName)
		var m2mTableAlias types.Q
		if m2mTable != nil {
			m2mTableAlias = m2mTable.Alias
		} else if ind := strings.IndexByte(m2mTableName, '.'); ind >= 0 {
			// Schema-qualified name: alias is the part after the dot.
			m2mTableAlias = types.Q(m2mTableName[ind+1:])
		} else {
			m2mTableAlias = types.Q(m2mTableName)
		}
		var fks []string
		if !fkOK {
			fk = t.ModelName + "_"
		}
		if m2mTable != nil {
			keys := foreignKeys(t, m2mTable, fk, fkOK)
			if len(keys) == 0 {
				return false
			}
			for _, fk := range keys {
				fks = append(fks, fk.SQLName)
			}
		} else {
			// Intermediary table not registered: derive FK names from
			// the prefix and the base table's PKs.
			if fkOK && len(t.PKs) == 1 {
				fks = append(fks, fk)
			} else {
				for _, pk := range t.PKs {
					fks = append(fks, fk+pk.SQLName)
				}
			}
		}
		joinFK, joinFKOK := pgTag.Options["joinFK"]
		if joinFKOK {
			joinFK = tryUnderscorePrefix(joinFK)
		} else {
			joinFK = joinTable.ModelName + "_"
		}
		var joinFKs []string
		if m2mTable != nil {
			keys := foreignKeys(joinTable, m2mTable, joinFK, joinFKOK)
			if len(keys) == 0 {
				return false
			}
			for _, fk := range keys {
				joinFKs = append(joinFKs, fk.SQLName)
			}
		} else {
			// Same fallback naming for the join side.
			if joinFKOK && len(joinTable.PKs) == 1 {
				joinFKs = append(joinFKs, joinFK)
			} else {
				for _, pk := range joinTable.PKs {
					joinFKs = append(joinFKs, joinFK+pk.SQLName)
				}
			}
		}
		t.addRelation(&Relation{
			Type: Many2ManyRelation,
			Field: field,
			JoinTable: joinTable,
			M2MTableName: types.Q(m2mTableName),
			M2MTableAlias: m2mTableAlias,
			BaseFKs: fks,
			JoinFKs: joinFKs,
		})
		return true
	}
	// has-many, optionally polymorphic (join table carries a <prefix>type
	// discriminator column).
	s, polymorphic := pgTag.Options["polymorphic"]
	var typeField *Field
	if polymorphic {
		fk = tryUnderscorePrefix(s)
		typeField = joinTable.getField(fk + "type")
		if typeField == nil {
			return false
		}
	} else if !fkOK {
		fk = t.ModelName + "_"
	}
	fks := foreignKeys(t, joinTable, fk, fkOK || polymorphic)
	if len(fks) == 0 {
		return false
	}
	// fk_value lets the single FK reference a non-PK column of t.
	var fkValues []*Field
	fkValue, ok := pgTag.Options["fk_value"]
	if ok {
		if len(fks) > 1 {
			panic(fmt.Errorf("got fk_value, but there are %d fks", len(fks)))
		}
		f := t.getField(fkValue)
		if f == nil {
			panic(fmt.Errorf("fk_value=%q not found in %s", fkValue, t))
		}
		fkValues = append(fkValues, f)
	} else {
		fkValues = t.PKs
	}
	if len(fks) != len(fkValues) {
		panic("len(fks) != len(fkValues)")
	}
	if len(fks) > 0 {
		t.addRelation(&Relation{
			Type: HasManyRelation,
			Field: field,
			JoinTable: joinTable,
			FKs: fks,
			Polymorphic: typeField,
			FKValues: fkValues,
		})
		return true
	}
	return false
}
// tryRelationStruct registers a has-one or belongs-to relation for a
// struct field. Regardless of the outcome, the struct's columns are also
// inlined (accessible as <field>__<column>).
func (t *Table) tryRelationStruct(field *Field) bool {
	pgTag := tag.Parse(field.Field.Tag.Get("pg"))
	joinTable := _tables.get(field.Type, true)
	if len(joinTable.allFields) == 0 {
		return false
	}
	// tryHasOne short-circuits tryBelongsToOne when it succeeds.
	res := t.tryHasOne(joinTable, field, pgTag) ||
		t.tryBelongsToOne(joinTable, field, pgTag)
	t.inlineFields(field, joinTable, nil)
	return res
}
// inlineFields exposes every column of joinTable on t under the name
// <strct>__<column>, recursing into nested struct columns. path tracks the
// tables already visited on this branch to stop cyclic recursion.
// Existing column names are never overridden.
func (t *Table) inlineFields(strct *Field, joinTable *Table, path map[*Table]struct{}) {
	if path == nil {
		path = make(map[*Table]struct{}, 0)
	}
	path[joinTable] = struct{}{}
	for _, f := range joinTable.allFields {
		// Copy before mutating: the field is shared with joinTable.
		f = f.Copy()
		f.GoName = strct.GoName + "_" + f.GoName
		f.SQLName = strct.SQLName + "__" + f.SQLName
		f.Column = types.Q(types.AppendField(nil, f.SQLName, 1))
		f.Index = appendNew(strct.Index, f.Index...)
		if _, ok := t.FieldsMap[f.SQLName]; !ok {
			t.FieldsMap[f.SQLName] = f
		}
		if f.Type.Kind() != reflect.Struct {
			continue
		}
		tt := _tables.get(f.Type, true)
		if _, ok := path[tt]; !ok {
			t.inlineFields(f, tt, path)
		}
	}
}
// appendNew concatenates dst and src into a freshly allocated slice,
// leaving both inputs untouched (unlike append, which may share dst's
// backing array).
func appendNew(dst []int, src ...int) []int {
	out := make([]int, 0, len(dst)+len(src))
	out = append(out, dst...)
	out = append(out, src...)
	return out
}
// isPostgresKeyword reports whether s (case-insensitively) is one of the
// reserved PostgreSQL words that must be double-quoted as an identifier.
func isPostgresKeyword(s string) bool {
	lower := strings.ToLower(s)
	for _, kw := range []string{
		"user", "group", "constraint", "limit",
		"member", "placing", "references", "table",
	} {
		if lower == kw {
			return true
		}
	}
	return false
}
// isColumn reports whether typ should be treated as a plain column rather
// than a relation: it (or its pointer type) implements sql.Scanner.
func isColumn(typ reflect.Type) bool {
	return typ.Implements(scannerType) || reflect.PtrTo(typ).Implements(scannerType)
}

// fieldSQLType resolves the PostgreSQL column type for a field, honoring
// explicit `type`/`hstore` tag options before falling back to the
// Go-type-based mapping in sqlType. Custom and timestamptz types are
// flagged so they are emitted verbatim.
func fieldSQLType(field *Field, pgTag, sqlTag *tag.Tag) string {
	if typ, ok := sqlTag.Options["type"]; ok {
		field.SetFlag(customTypeFlag)
		typ, _ = tag.Unquote(typ)
		return typ
	}
	if _, ok := sqlTag.Options["hstore"]; ok {
		field.SetFlag(customTypeFlag)
		return "hstore"
	} else if _, ok := pgTag.Options["hstore"]; ok {
		field.SetFlag(customTypeFlag)
		return "hstore"
	}
	if field.HasFlag(ArrayFlag) && field.Type.Kind() == reflect.Slice {
		// Array column: element type plus "[]".
		sqlType := sqlType(field.Type.Elem())
		return sqlType + "[]"
	}
	sqlType := sqlType(field.Type)
	if field.HasFlag(PrimaryKeyFlag) {
		// Integer PKs become serial types.
		return pkSQLType(sqlType)
	}
	switch sqlType {
	case "timestamptz":
		field.SetFlag(customTypeFlag)
	}
	return sqlType
}
// sqlType maps a Go type to its default PostgreSQL column type. Known
// concrete types (time.Time, net.IP, sql.Null*, json.RawMessage, ...) are
// matched first, then the reflect.Kind.
func sqlType(typ reflect.Type) string {
	switch typ {
	case timeType:
		return "timestamptz"
	case ipType:
		return "inet"
	case ipNetType:
		return "cidr"
	case nullBoolType:
		return "boolean"
	case nullFloatType:
		return "double precision"
	case nullIntType:
		return "bigint"
	case nullStringType:
		return "text"
	case jsonRawMessageType:
		return "jsonb"
	}
	switch typ.Kind() {
	case reflect.Int8, reflect.Uint8, reflect.Int16:
		return "smallint"
	case reflect.Uint16, reflect.Int32:
		return "integer"
	case reflect.Uint32, reflect.Int64, reflect.Int:
		return "bigint"
	case reflect.Uint, reflect.Uint64:
		// The lesser of two evils.
		return "bigint"
	case reflect.Float32:
		return "real"
	case reflect.Float64:
		return "double precision"
	case reflect.Bool:
		return "boolean"
	case reflect.String:
		return "text"
	case reflect.Map, reflect.Struct:
		return "jsonb"
	case reflect.Array, reflect.Slice:
		if typ.Elem().Kind() == reflect.Uint8 {
			// []byte maps to bytea.
			return "bytea"
		}
		return "jsonb"
	default:
		// No sensible mapping; surface the kind name as-is.
		return typ.Kind().String()
	}
}
// pkSQLType maps an integer column type to its auto-incrementing serial
// counterpart for primary keys; any other type passes through unchanged.
func pkSQLType(s string) string {
	serial := map[string]string{
		"smallint": "smallserial",
		"integer":  "serial",
		"bigint":   "bigserial",
	}
	if v, ok := serial[s]; ok {
		return v
	}
	return s
}

// sqlTypeEqual reports whether two column types are compatible, treating a
// type and its serial counterpart (e.g. bigint vs bigserial) as equal.
func sqlTypeEqual(a, b string) bool {
	return a == b || pkSQLType(a) == pkSQLType(b)
}
// tryHasOne registers a has-one relation: t carries FK columns referencing
// joinTable's PKs. The FK prefix comes from the `fk` tag option or defaults
// to the underscored field name. Reports whether the relation was added.
func (t *Table) tryHasOne(joinTable *Table, field *Field, pgTag *tag.Tag) bool {
	fk, fkOK := pgTag.Options["fk"]
	if fkOK {
		if fk == "-" {
			// Relation explicitly disabled.
			return false
		}
		fk = tryUnderscorePrefix(fk)
	} else {
		fk = internal.Underscore(field.GoName) + "_"
	}
	// Note the argument order: FKs are looked up on t (the base table).
	fks := foreignKeys(joinTable, t, fk, fkOK)
	if len(fks) > 0 {
		t.addRelation(&Relation{
			Type: HasOneRelation,
			Field: field,
			FKs: fks,
			JoinTable: joinTable,
		})
		return true
	}
	return false
}

// tryBelongsToOne registers a belongs-to relation: joinTable carries FK
// columns referencing t's PKs. The FK prefix defaults to t's underscored
// type name. Reports whether the relation was added.
func (t *Table) tryBelongsToOne(joinTable *Table, field *Field, pgTag *tag.Tag) bool {
	fk, fkOK := pgTag.Options["fk"]
	if fkOK {
		if fk == "-" {
			return false
		}
		fk = tryUnderscorePrefix(fk)
	} else {
		fk = internal.Underscore(t.TypeName) + "_"
	}
	fks := foreignKeys(t, joinTable, fk, fkOK)
	if len(fks) > 0 {
		t.addRelation(&Relation{
			Type: BelongsToRelation,
			Field: field,
			FKs: fks,
			JoinTable: joinTable,
		})
		return true
	}
	return false
}
// addRelation indexes the relation by its Go field name, lazily allocating
// the map. Panics on a duplicate relation for the same field.
func (t *Table) addRelation(rel *Relation) {
	if t.Relations == nil {
		t.Relations = make(map[string]*Relation)
	}
	_, ok := t.Relations[rel.Field.GoName]
	if ok {
		panic(fmt.Errorf("%s already has %s", t, rel))
	}
	t.Relations[rel.Field.GoName] = rel
}
// foreignKeys finds the columns in join that reference base's primary
// keys, requiring compatible SQL types (sqlTypeEqual). Strategies, in
// order:
//  1. <fk><pk_name> for every base PK, falling back to the bare PK name
//     when it contains an underscore;
//  2. fk_<suffix> for PKs named pk_<suffix>;
//  3. single-PK base only: the bare fk name (when tryFK is set), then
//     <fk>id and <fk>uuid.
// Returns nil unless a complete key set is found.
func foreignKeys(base, join *Table, fk string, tryFK bool) []*Field {
	var fks []*Field
	for _, pk := range base.PKs {
		fkName := fk + pk.SQLName
		f := join.getField(fkName)
		if f != nil && sqlTypeEqual(pk.SQLType, f.SQLType) {
			fks = append(fks, f)
			continue
		}
		if strings.IndexByte(pk.SQLName, '_') == -1 {
			continue
		}
		// Compound PK name: also try it without the prefix.
		f = join.getField(pk.SQLName)
		if f != nil && sqlTypeEqual(pk.SQLType, f.SQLType) {
			fks = append(fks, f)
			continue
		}
	}
	if len(fks) > 0 && len(fks) == len(base.PKs) {
		return fks
	}
	// Strategy 2: pk_<x> on the base matches fk_<x> on the join.
	fks = nil
	for _, pk := range base.PKs {
		if !strings.HasPrefix(pk.SQLName, "pk_") {
			continue
		}
		fkName := "fk_" + pk.SQLName[3:]
		f := join.getField(fkName)
		if f != nil && sqlTypeEqual(pk.SQLType, f.SQLType) {
			fks = append(fks, f)
		}
	}
	if len(fks) > 0 && len(fks) == len(base.PKs) {
		return fks
	}
	// Strategy 3 only applies to a single-PK base with a known prefix.
	if fk == "" || len(base.PKs) != 1 {
		return nil
	}
	if tryFK {
		f := join.getField(fk)
		if f != nil && sqlTypeEqual(base.PKs[0].SQLType, f.SQLType) {
			return []*Field{f}
		}
	}
	for _, suffix := range []string{"id", "uuid"} {
		f := join.getField(fk + suffix)
		if f != nil && sqlTypeEqual(base.PKs[0].SQLType, f.SQLType) {
			return []*Field{f}
		}
	}
	return nil
}
// getField is the nil-returning variant of GetField.
func (t *Table) getField(name string) *Field {
	return t.FieldsMap[name]
}

// scanJSONValue decodes a JSON column into v using json.Decoder with
// UseNumber (numbers stay json.Number instead of float64). n is the value
// length; -1 means SQL NULL, which resets v to its zero value.
func scanJSONValue(v reflect.Value, rd types.Reader, n int) error {
	if !v.CanSet() {
		return fmt.Errorf("pg: Scan(non-pointer %s)", v.Type())
	}
	if n == -1 {
		v.Set(reflect.New(v.Type()).Elem())
		return nil
	}
	dec := json.NewDecoder(rd)
	dec.UseNumber()
	return dec.Decode(v.Addr().Interface())
}
// tryUnderscorePrefix normalizes a tag-supplied FK prefix: an exported Go
// name like "Author" becomes "author_"; anything else is returned as-is.
func tryUnderscorePrefix(s string) string {
	if s == "" {
		return s
	}
	if c := s[0]; internal.IsUpper(c) {
		return internal.Underscore(s) + "_"
	}
	return s
}

// quoteTableName double-quotes names that collide with reserved
// PostgreSQL keywords so they stay valid identifiers.
func quoteTableName(s string) string {
	if isPostgresKeyword(s) {
		return `"` + s + `"`
	}
	return s
}
| removeField |
51387d8fda8d_add_default_value_to_is_invited.py | """add default value to is_invited
Revision ID: 51387d8fda8d
Revises: 6779bebb64e6
Create Date: 2021-12-21 18:19:50.864781
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '51387d8fda8d'
down_revision = '6779bebb64e6'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration (no-op placeholder; reconstructed from the
    extraction artifacts — the generated body was a bare ``pass``)."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade():
    """Revert the migration (no-op placeholder)."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
test_gecko.py | import sys
from flaky import flaky
from .tools import SRC_ROOT, AutomaticBaseTest, ExplicitBaseTest, NO_FILE, NO_LINK_FILE
sys.path.append(SRC_ROOT)
import webdrivermanager # noqa: E402 I001
class GeckoDriverManagerTestsWithAutomaticLocations(AutomaticBaseTest):
    """Exercise GeckoDriverManager with default download/link locations.

    The body of ``test_download`` was reconstructed from the extraction
    artifacts (it mirrors the explicit-locations variant below without the
    ``download_root`` argument); indentation restored throughout.
    """

    DRIVER_MANAGER = webdrivermanager.GeckoDriverManager

    @flaky
    def test_download(self):
        self.instance = self.DRIVER_MANAGER()
        filename = self.instance.download(show_progress_bar=False)
        self.assertTrue(filename.is_file(), NO_FILE)

    @flaky
    def test_download_and_install(self):
        self.instance = self.DRIVER_MANAGER()
        driver_link_target, driver_binary = self.instance.download_and_install(show_progress_bar=False)
        self.assertTrue(driver_binary.is_file(), NO_FILE)
        self.assertTrue(driver_link_target.is_file(), NO_LINK_FILE)


class GeckoDriverManagerTestsWithExplicitLocations(ExplicitBaseTest):
    """Exercise GeckoDriverManager with an explicit download root and link path."""

    DRIVER_MANAGER = webdrivermanager.GeckoDriverManager

    @flaky
    def test_download(self):
        self.instance = self.DRIVER_MANAGER(download_root=self.temp_dir.name)
        filename = self.instance.download(show_progress_bar=False)
        self.assertTrue(filename.is_file(), NO_FILE)

    @flaky
    def test_download_and_install(self):
        link_path = self.make_link_dir()
        self.instance = self.DRIVER_MANAGER(download_root=self.temp_dir.name, link_path=link_path)
        driver_link_target, driver_binary = self.instance.download_and_install(show_progress_bar=False)
        self.assertTrue(driver_binary.is_file(), NO_FILE)
        self.assertTrue(driver_link_target.is_file(), NO_LINK_FILE)
uploadMedia.ts | import Twitter from 'twitter'
import {isString} from 'util'
import * as fs from 'fs'
import * as core from '@actions/core'
export async function uploadMedia(mediaPaths: string[]): Promise<string[]> {
return new Promise(async (resolve, reject) => {
core.debug(JSON.stringify(mediaPaths))
for (const path of mediaPaths) {
if (!isString(path)) {
throw new Error('media path not a string')
}
if (!fs.existsSync(path)) {
throw new Error(`${path} not exists`)
}
}
const consumer_key = process.env.CONSUMER_API_KEY as string
const consumer_secret = process.env.CONSUMER_API_SECRET_KEY as string | consumer_key,
consumer_secret,
access_token_key,
access_token_secret
})
try {
const promises = mediaPaths.map(async path => {
const media = fs.readFileSync(path)
// TODO: chunked
return await client.post('media/upload', {media})
})
const responses = await Promise.all(promises)
resolve(
responses.map(x => {
core.debug(`ResponseData: ${JSON.stringify(x)}`)
return x.media_id_string
})
)
} catch (error) {
reject(new Error('upload failed'))
}
})
} | const access_token_key = process.env.ACCESS_TOKEN as string
const access_token_secret = process.env.ACCESS_TOKEN_SECRET as string
const client = new Twitter({ |
mod.rs | use git_odb::linked::Store;
use crate::fixture_path;
/// Open the shared fixture object database (`objects/` under the test
/// fixtures) used by the test modules below.
fn db() -> Store {
    Store::at(fixture_path("objects")).expect("valid object path")
}
/// Tests for iterating all objects (loose and packed) in a linked store.
mod iter {
    use crate::odb::store::linked::db;

    #[test]
    fn arc_iter() {
        // The iterator must also be obtainable from an Arc-wrapped store.
        let db = std::sync::Arc::new(db());
        let _ = db.arc_iter();
    }

    #[test]
    fn a_bunch_of_loose_and_packed_objects() -> crate::Result {
        let db = db();
        let iter = db.iter();
        // The lower bound reflects packed objects only; loose objects are
        // not counted up front.
        assert_eq!(
            iter.size_hint(),
            (139, None),
            "we only count packs and have no upper bound"
        );
        assert_eq!(iter.count(), 146, "it sees the correct amount of objects");
        for id in db.iter() {
            assert!(db.contains(id?), "each object exists");
        }
        Ok(())
    }
}
/// Tests for finding individual objects by id, both loose and packed.
mod locate {
    use crate::{hex_to_id, odb::store::linked::db};
    use git_odb::{linked::Store, pack, Find};

    /// Assert that the object with the given hex id can be found.
    fn can_locate(db: &Store, hex_id: &str) {
        let mut buf = vec![];
        assert!(db
            .find(hex_to_id(hex_id), &mut buf, &mut pack::cache::Never)
            .expect("no read error")
            .is_some());
    }

    #[test]
    fn loose_object() {
        can_locate(&db(), "37d4e6c5c48ba0d245164c4e10d5f41140cab980");
    }

    #[test]
    fn pack_object() {
        // One object from each of the three fixture packs.
        can_locate(&db(), "501b297447a8255d3533c6858bb692575cdefaa0"); // pack 11fd
        can_locate(&db(), "4dac9989f96bc5b5b1263b582c08f0c5f0b58542"); // pack a2bf
        can_locate(&db(), "dd25c539efbb0ab018caa4cda2d133285634e9b5"); // pack c043
    }
}
mod init {
use git_odb::linked;
use std::convert::TryFrom;
use crate::odb::alternate::alternate;
use crate::odb::store::linked::db;
#[test]
fn | () -> crate::Result {
let tmp = git_testtools::tempfile::TempDir::new()?;
let (object_path, linked_object_path) = alternate(tmp.path().join("a"), tmp.path().join("b"))?;
let db = linked::Store::try_from(object_path.clone())?;
assert_eq!(db.dbs.len(), 2);
assert_eq!(db.dbs[0].loose.path, object_path);
assert_eq!(db.dbs[1].loose.path, linked_object_path);
Ok(())
}
#[test]
fn a_linked_db_without_alternates() -> crate::Result {
let tmp = git_testtools::tempfile::TempDir::new()?;
let db = linked::Store::at(tmp.path())?;
assert_eq!(db.dbs.len(), 1);
assert_eq!(db.dbs[0].loose.path, tmp.path());
Ok(())
}
#[test]
fn has_packs() {
let db = db();
assert_eq!(db.dbs.len(), 1);
assert_eq!(db.dbs[0].bundles.len(), 3)
}
}
| multiple_linked_repositories_via_alternates |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.