file_name (stringlengths 3-137) | prefix (stringlengths 0-918k) | suffix (stringlengths 0-962k) | middle (stringlengths 0-812k) |
---|---|---|---|
Ratio.rs
|
#![allow(unused_imports, non_camel_case_types)]
use crate::models::r5::Extension::Extension;
use crate::models::r5::Quantity::Quantity;
use serde_json::json;
use serde_json::value::Value;
use std::borrow::Cow;
/// A relationship of two Quantity values - expressed as a numerator and a
/// denominator.
#[derive(Debug)]
pub struct Ratio<'a> {
pub(crate) value: Cow<'a, Value>,
}
impl Ratio<'_> {
pub fn new(value: &Value) -> Ratio {
Ratio {
value: Cow::Borrowed(value),
}
}
pub fn to_json(&self) -> Value {
(*self.value).clone()
}
/// The value of the denominator.
pub fn denominator(&self) -> Option<Quantity> {
if let Some(val) = self.value.get("denominator") {
return Some(Quantity {
value: Cow::Borrowed(val),
});
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the element. To make the use of extensions safe and manageable,
/// there is a strict set of governance applied to the definition and use of
/// extensions. Though any implementer can define an extension, there is a set of
/// requirements that SHALL be met as part of the definition of the extension.
pub fn extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("extension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Unique id for the element within a resource (for internal references). This may be
/// any string value that does not contain spaces.
pub fn id(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("id") {
return Some(string);
}
return None;
}
/// The value of the numerator.
pub fn numerator(&self) -> Option<Quantity> {
if let Some(val) = self.value.get("numerator") {
return Some(Quantity {
value: Cow::Borrowed(val),
});
}
return None;
}
pub fn validate(&self) -> bool {
if let Some(_val) = self.denominator() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.extension() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.id() {}
if let Some(_val) = self.numerator() {
if !_val.validate() {
return false;
}
}
return true;
}
}
#[derive(Debug)]
pub struct RatioBuilder {
pub(crate) value: Value,
}
impl RatioBuilder {
pub fn build(&self) -> Ratio {
Ratio {
value: Cow::Owned(self.value.clone()),
}
}
pub fn with(existing: Ratio) -> RatioBuilder {
RatioBuilder {
value: (*existing.value).clone(),
}
}
pub fn new() -> RatioBuilder
|
pub fn denominator<'a>(&'a mut self, val: Quantity) -> &'a mut RatioBuilder {
self.value["denominator"] = json!(val.value);
return self;
}
pub fn extension<'a>(&'a mut self, val: Vec<Extension>) -> &'a mut RatioBuilder {
self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn id<'a>(&'a mut self, val: &str) -> &'a mut RatioBuilder {
self.value["id"] = json!(val);
return self;
}
pub fn numerator<'a>(&'a mut self, val: Quantity) -> &'a mut RatioBuilder {
self.value["numerator"] = json!(val.value);
return self;
}
}
|
{
let mut __value: Value = json!({});
return RatioBuilder { value: __value };
}
|
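Ratio.rs is a thin wrapper around a serde_json `Value`: every accessor above is a `get()` on a JSON object, and the builder just writes keys back into one. Below is a minimal sketch of the kind of JSON a built Ratio carries, using only serde_json; the quantity fields `value`/`unit` follow the FHIR Quantity datatype, and the `dose-rate` id is made up for illustration.

```rust
use serde_json::json;

fn main() {
    // The RatioBuilder accumulates a JSON object of this shape;
    // Ratio::numerator()/denominator() then borrow the sub-objects as Quantity values.
    let ratio = json!({
        "numerator":   { "value": 1.0, "unit": "mg" },
        "denominator": { "value": 8.0, "unit": "h" },
        "id": "dose-rate"
    });
    assert_eq!(ratio["numerator"]["value"], 1.0);
    assert_eq!(ratio["id"], "dose-rate");
    println!("{}", ratio);
}
```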
cfg1.rs
|
#[doc = "Reader of register CFG1"]
pub type R = crate::R<u32, super::CFG1>;
#[doc = "Writer for register CFG1"]
pub type W = crate::W<u32, super::CFG1>;
#[doc = "Register CFG1 `reset()`'s with value 0x0100_0000"]
impl crate::ResetValue for super::CFG1 {
#[inline(always)]
fn reset_value() -> Self::Ux { 0x0100_0000 }
}
#[doc = "Reader of field `SRC_PER`"]
pub type SRC_PER_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SRC_PER`"]
pub struct SRC_PER_W<'a> { w: &'a mut W }
impl<'a> SRC_PER_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
self.w
}
}
#[doc = "Reader of field `DST_PER`"]
pub type DST_PER_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DST_PER`"]
pub struct DST_PER_W<'a> { w: &'a mut W }
impl<'a> DST_PER_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4);
self.w
}
}
#[doc = "Software or Hardware Selection for the Source\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SRC_H2SEL_A { #[doc = "0: Software handshaking interface is used to trigger a transfer request."] SW = 0, #[doc = "1: Hardware handshaking interface is used to trigger a transfer request."] HW = 1 }
impl From<SRC_H2SEL_A> for bool {
#[inline(always)]
fn from(variant: SRC_H2SEL_A) -> Self { variant as u8 != 0 }
}
#[doc = "Reader of field `SRC_H2SEL`"]
pub type SRC_H2SEL_R = crate::R<bool, SRC_H2SEL_A>;
impl SRC_H2SEL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SRC_H2SEL_A {
match self.bits {
false => SRC_H2SEL_A::SW,
true => SRC_H2SEL_A::HW,
}
}
#[doc = "Checks if the value of the field is `SW`"]
#[inline(always)]
pub fn is_sw(&self) -> bool { *self == SRC_H2SEL_A::SW }
#[doc = "Checks if the value of the field is `HW`"]
#[inline(always)]
pub fn is_hw(&self) -> bool { *self == SRC_H2SEL_A::HW }
}
#[doc = "Write proxy for field `SRC_H2SEL`"]
pub struct SRC_H2SEL_W<'a> { w: &'a mut W }
impl<'a> SRC_H2SEL_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SRC_H2SEL_A) -> &'a mut W { { self.bit(variant.into()) } }
#[doc = "Software handshaking interface is used to trigger a transfer request."]
#[inline(always)]
pub fn sw(self) -> &'a mut W { self.variant(SRC_H2SEL_A::SW) }
#[doc = "Hardware handshaking interface is used to trigger a transfer request."]
#[inline(always)]
pub fn hw(self) -> &'a mut W { self.variant(SRC_H2SEL_A::HW) }
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Software or Hardware Selection for the Destination\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DST_H2SEL_A { #[doc = "0: Software handshaking interface is used to trigger a transfer request."] SW = 0, #[doc = "1: Hardware handshaking interface is used to trigger a transfer request."] HW = 1 }
impl From<DST_H2SEL_A> for bool {
#[inline(always)]
fn from(variant: DST_H2SEL_A) -> Self { variant as u8 != 0 }
}
#[doc = "Reader of field `DST_H2SEL`"]
pub type DST_H2SEL_R = crate::R<bool, DST_H2SEL_A>;
impl DST_H2SEL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DST_H2SEL_A {
match self.bits {
false => DST_H2SEL_A::SW,
true => DST_H2SEL_A::HW,
}
}
#[doc = "Checks if the value of the field is `SW`"]
#[inline(always)]
pub fn is_sw(&self) -> bool { *self == DST_H2SEL_A::SW }
#[doc = "Checks if the value of the field is `HW`"]
#[inline(always)]
pub fn is_hw(&self) -> bool { *self == DST_H2SEL_A::HW }
}
#[doc = "Write proxy for field `DST_H2SEL`"]
pub struct DST_H2SEL_W<'a> { w: &'a mut W }
impl<'a> DST_H2SEL_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: DST_H2SEL_A) -> &'a mut W { { self.bit(variant.into()) } }
#[doc = "Software handshaking interface is used to trigger a transfer request."]
#[inline(always)]
pub fn sw(self) -> &'a mut W { self.variant(DST_H2SEL_A::SW) }
#[doc = "Hardware handshaking interface is used to trigger a transfer request."]
#[inline(always)]
pub fn hw(self) -> &'a mut W { self.variant(DST_H2SEL_A::HW) }
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
self.w
}
}
#[doc = "Stop On Done\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SOD_A { #[doc = "0: STOP ON DONE disabled, the descriptor fetch operation ignores DONE Field of CTRLA register."] DISABLE = 0, #[doc = "1: STOP ON DONE activated, the DMAC module is automatically disabled if DONE FIELD is set to 1."] ENABLE = 1 }
impl From<SOD_A> for bool {
#[inline(always)]
fn from(variant: SOD_A) -> Self { variant as u8 != 0 }
}
#[doc = "Reader of field `SOD`"]
pub type SOD_R = crate::R<bool, SOD_A>;
impl SOD_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SOD_A {
match self.bits {
false => SOD_A::DISABLE,
true => SOD_A::ENABLE,
}
}
#[doc = "Checks if the value of the field is `DISABLE`"]
#[inline(always)]
pub fn is_disable(&self) -> bool { *self == SOD_A::DISABLE }
#[doc = "Checks if the value of the field is `ENABLE`"]
#[inline(always)]
pub fn is_enable(&self) -> bool { *self == SOD_A::ENABLE }
}
#[doc = "Write proxy for field `SOD`"]
pub struct SOD_W<'a> { w: &'a mut W }
impl<'a> SOD_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SOD_A) -> &'a mut W { { self.bit(variant.into()) } }
#[doc = "STOP ON DONE disabled, the descriptor fetch operation ignores DONE Field of CTRLA register."]
#[inline(always)]
pub fn disable(self) -> &'a mut W { self.variant(SOD_A::DISABLE) }
#[doc = "STOP ON DONE activated, the DMAC module is automatically disabled if DONE FIELD is set to 1."]
#[inline(always)]
pub fn enable(self) -> &'a mut W { self.variant(SOD_A::ENABLE) }
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
self.w
}
}
#[doc = "Interface Lock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LOCK_IF_A { #[doc = "0: Interface Lock capability is disabled"] DISABLE = 0, #[doc = "1: Interface Lock capability is enabled"] ENABLE = 1 }
impl From<LOCK_IF_A> for bool {
#[inline(always)]
fn from(variant: LOCK_IF_A) -> Self { variant as u8 != 0 }
}
#[doc = "Reader of field `LOCK_IF`"]
pub type LOCK_IF_R = crate::R<bool, LOCK_IF_A>;
impl LOCK_IF_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LOCK_IF_A {
match self.bits {
false => LOCK_IF_A::DISABLE,
true => LOCK_IF_A::ENABLE,
}
}
#[doc = "Checks if the value of the field is `DISABLE`"]
#[inline(always)]
pub fn is_disable(&self) -> bool { *self == LOCK_IF_A::DISABLE }
#[doc = "Checks if the value of the field is `ENABLE`"]
#[inline(always)]
pub fn is_enable(&self) -> bool { *self == LOCK_IF_A::ENABLE }
}
#[doc = "Write proxy for field `LOCK_IF`"]
pub struct LOCK_IF_W<'a> { w: &'a mut W }
impl<'a> LOCK_IF_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LOCK_IF_A) -> &'a mut W { { self.bit(variant.into()) } }
#[doc = "Interface Lock capability is disabled"]
#[inline(always)]
pub fn disable(self) -> &'a mut W { self.variant(LOCK_IF_A::DISABLE) }
#[doc = "Interface Lock capability is enabled"]
#[inline(always)]
pub fn enable(self) -> &'a mut W { self.variant(LOCK_IF_A::ENABLE) }
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
self.w
}
}
#[doc = "Bus Lock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LOCK_B_A { #[doc = "0: AHB Bus Locking capability is disabled."] DISABLE = 0 }
impl From<LOCK_B_A> for bool {
#[inline(always)]
fn from(variant: LOCK_B_A) -> Self { variant as u8 != 0 }
}
#[doc = "Reader of field `LOCK_B`"]
pub type LOCK_B_R = crate::R<bool, LOCK_B_A>;
impl LOCK_B_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<bool, LOCK_B_A> {
use crate::Variant::*;
match self.bits {
false => Val(LOCK_B_A::DISABLE),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `DISABLE`"]
#[inline(always)]
pub fn is_disable(&self) -> bool { *self == LOCK_B_A::DISABLE }
}
#[doc = "Write proxy for field `LOCK_B`"]
pub struct LOCK_B_W<'a> { w: &'a mut W }
impl<'a> LOCK_B_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LOCK_B_A) -> &'a mut W { { self.bit(variant.into()) } }
#[doc = "AHB Bus Locking capability is disabled."]
#[inline(always)]
pub fn disable(self) -> &'a mut W { self.variant(LOCK_B_A::DISABLE) }
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
self.w
}
}
#[doc = "Master Interface Arbiter Lock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LOCK_IF_L_A { #[doc = "0: The Master Interface Arbiter is locked by the channel x for a chunk transfer."] CHUNK = 0, #[doc = "1: The Master Interface Arbiter is locked by the channel x for a buffer transfer."] BUFFER = 1 }
impl From<LOCK_IF_L_A> for bool {
#[inline(always)]
fn from(variant: LOCK_IF_L_A) -> Self { variant as u8 != 0 }
|
#[doc = "Reader of field `LOCK_IF_L`"]
pub type LOCK_IF_L_R = crate::R<bool, LOCK_IF_L_A>;
impl LOCK_IF_L_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LOCK_IF_L_A {
match self.bits {
false => LOCK_IF_L_A::CHUNK,
true => LOCK_IF_L_A::BUFFER,
}
}
#[doc = "Checks if the value of the field is `CHUNK`"]
#[inline(always)]
pub fn is_chunk(&self) -> bool { *self == LOCK_IF_L_A::CHUNK }
#[doc = "Checks if the value of the field is `BUFFER`"]
#[inline(always)]
pub fn is_buffer(&self) -> bool { *self == LOCK_IF_L_A::BUFFER }
}
#[doc = "Write proxy for field `LOCK_IF_L`"]
pub struct LOCK_IF_L_W<'a> { w: &'a mut W }
impl<'a> LOCK_IF_L_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LOCK_IF_L_A) -> &'a mut W { { self.bit(variant.into()) } }
#[doc = "The Master Interface Arbiter is locked by the channel x for a chunk transfer."]
#[inline(always)]
pub fn chunk(self) -> &'a mut W { self.variant(LOCK_IF_L_A::CHUNK) }
#[doc = "The Master Interface Arbiter is locked by the channel x for a buffer transfer."]
#[inline(always)]
pub fn buffer(self) -> &'a mut W { self.variant(LOCK_IF_L_A::BUFFER) }
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W { self.bit(true) }
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);
self.w
}
}
#[doc = "Reader of field `AHB_PROT`"]
pub type AHB_PROT_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `AHB_PROT`"]
pub struct AHB_PROT_W<'a> { w: &'a mut W }
impl<'a> AHB_PROT_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x07 << 24)) | (((value as u32) & 0x07) << 24);
self.w
}
}
#[doc = "FIFO Configuration\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum FIFOCFG_A { #[doc = "0: The largest defined length AHB burst is performed on the destination AHB interface."] ALAP_CFG = 0, #[doc = "1: When half FIFO size is available/filled, a source/destination request is serviced."] HALF_CFG = 1, #[doc = "2: When there is enough space/data available to perform a single AHB access, then the request is serviced."] ASAP_CFG = 2 }
impl From<FIFOCFG_A> for u8 {
#[inline(always)]
fn from(variant: FIFOCFG_A) -> Self { variant as _ }
}
#[doc = "Reader of field `FIFOCFG`"]
pub type FIFOCFG_R = crate::R<u8, FIFOCFG_A>;
impl FIFOCFG_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<u8, FIFOCFG_A> {
use crate::Variant::*;
match self.bits {
0 => Val(FIFOCFG_A::ALAP_CFG),
1 => Val(FIFOCFG_A::HALF_CFG),
2 => Val(FIFOCFG_A::ASAP_CFG),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `ALAP_CFG`"]
#[inline(always)]
pub fn is_alap_cfg(&self) -> bool { *self == FIFOCFG_A::ALAP_CFG }
#[doc = "Checks if the value of the field is `HALF_CFG`"]
#[inline(always)]
pub fn is_half_cfg(&self) -> bool { *self == FIFOCFG_A::HALF_CFG }
#[doc = "Checks if the value of the field is `ASAP_CFG`"]
#[inline(always)]
pub fn is_asap_cfg(&self) -> bool { *self == FIFOCFG_A::ASAP_CFG }
}
#[doc = "Write proxy for field `FIFOCFG`"]
pub struct FIFOCFG_W<'a> { w: &'a mut W }
impl<'a> FIFOCFG_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: FIFOCFG_A) -> &'a mut W { unsafe { self.bits(variant.into()) } }
#[doc = "The largest defined length AHB burst is performed on the destination AHB interface."]
#[inline(always)]
pub fn alap_cfg(self) -> &'a mut W { self.variant(FIFOCFG_A::ALAP_CFG) }
#[doc = "When half FIFO size is available/filled, a source/destination request is serviced."]
#[inline(always)]
pub fn half_cfg(self) -> &'a mut W { self.variant(FIFOCFG_A::HALF_CFG) }
#[doc = "When there is enough space/data available to perform a single AHB access, then the request is serviced."]
#[inline(always)]
pub fn asap_cfg(self) -> &'a mut W { self.variant(FIFOCFG_A::ASAP_CFG) }
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 28)) | (((value as u32) & 0x03) << 28);
self.w
}
}
impl R {
#[doc = "Bits 0:3 - Source with Peripheral identifier"]
#[inline(always)]
pub fn src_per(&self) -> SRC_PER_R { SRC_PER_R::new((self.bits & 0x0f) as u8) }
#[doc = "Bits 4:7 - Destination with Peripheral identifier"]
#[inline(always)]
pub fn dst_per(&self) -> DST_PER_R { DST_PER_R::new(((self.bits >> 4) & 0x0f) as u8) }
#[doc = "Bit 9 - Software or Hardware Selection for the Source"]
#[inline(always)]
pub fn src_h2sel(&self) -> SRC_H2SEL_R { SRC_H2SEL_R::new(((self.bits >> 9) & 0x01) != 0) }
#[doc = "Bit 13 - Software or Hardware Selection for the Destination"]
#[inline(always)]
pub fn dst_h2sel(&self) -> DST_H2SEL_R { DST_H2SEL_R::new(((self.bits >> 13) & 0x01) != 0) }
#[doc = "Bit 16 - Stop On Done"]
#[inline(always)]
pub fn sod(&self) -> SOD_R { SOD_R::new(((self.bits >> 16) & 0x01) != 0) }
#[doc = "Bit 20 - Interface Lock"]
#[inline(always)]
pub fn lock_if(&self) -> LOCK_IF_R { LOCK_IF_R::new(((self.bits >> 20) & 0x01) != 0) }
#[doc = "Bit 21 - Bus Lock"]
#[inline(always)]
pub fn lock_b(&self) -> LOCK_B_R { LOCK_B_R::new(((self.bits >> 21) & 0x01) != 0) }
#[doc = "Bit 22 - Master Interface Arbiter Lock"]
#[inline(always)]
pub fn lock_if_l(&self) -> LOCK_IF_L_R { LOCK_IF_L_R::new(((self.bits >> 22) & 0x01) != 0) }
#[doc = "Bits 24:26 - AHB Protection"]
#[inline(always)]
pub fn ahb_prot(&self) -> AHB_PROT_R { AHB_PROT_R::new(((self.bits >> 24) & 0x07) as u8) }
#[doc = "Bits 28:29 - FIFO Configuration"]
#[inline(always)]
pub fn fifocfg(&self) -> FIFOCFG_R { FIFOCFG_R::new(((self.bits >> 28) & 0x03) as u8) }
}
impl W {
#[doc = "Bits 0:3 - Source with Peripheral identifier"]
#[inline(always)]
pub fn src_per(&mut self) -> SRC_PER_W { SRC_PER_W { w: self } }
#[doc = "Bits 4:7 - Destination with Peripheral identifier"]
#[inline(always)]
pub fn dst_per(&mut self) -> DST_PER_W { DST_PER_W { w: self } }
#[doc = "Bit 9 - Software or Hardware Selection for the Source"]
#[inline(always)]
pub fn src_h2sel(&mut self) -> SRC_H2SEL_W { SRC_H2SEL_W { w: self } }
#[doc = "Bit 13 - Software or Hardware Selection for the Destination"]
#[inline(always)]
pub fn dst_h2sel(&mut self) -> DST_H2SEL_W { DST_H2SEL_W { w: self } }
#[doc = "Bit 16 - Stop On Done"]
#[inline(always)]
pub fn sod(&mut self) -> SOD_W { SOD_W { w: self } }
#[doc = "Bit 20 - Interface Lock"]
#[inline(always)]
pub fn lock_if(&mut self) -> LOCK_IF_W { LOCK_IF_W { w: self } }
#[doc = "Bit 21 - Bus Lock"]
#[inline(always)]
pub fn lock_b(&mut self) -> LOCK_B_W { LOCK_B_W { w: self } }
#[doc = "Bit 22 - Master Interface Arbiter Lock"]
#[inline(always)]
pub fn lock_if_l(&mut self) -> LOCK_IF_L_W { LOCK_IF_L_W { w: self } }
#[doc = "Bits 24:26 - AHB Protection"]
#[inline(always)]
pub fn ahb_prot(&mut self) -> AHB_PROT_W { AHB_PROT_W { w: self } }
#[doc = "Bits 28:29 - FIFO Configuration"]
#[inline(always)]
pub fn fifocfg(&mut self) -> FIFOCFG_W { FIFOCFG_W { w: self } }
}
|
}
|
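In firmware this CFG1 API would normally be driven through svd2rust's `read()`/`write(|w| ...)` closures on the owning DMAC channel register, which needs the full PAC crate to compile. As a self-contained stand-in, the sketch below reproduces the raw mask-and-shift arithmetic that the field readers and writers above perform on the 32-bit register value; the field values chosen are arbitrary examples.

```rust
fn main() {
    // Start from the documented reset value of CFG1.
    let mut bits: u32 = 0x0100_0000;
    // SRC_PER (bits 0:3) = 3, the same masking SRC_PER_W::bits() applies.
    bits = (bits & !0x0f) | (3 & 0x0f);
    // DST_H2SEL (bit 13) = HW, as DST_H2SEL_W::hw() would write.
    bits = (bits & !(0x01 << 13)) | (0x01 << 13);
    // FIFOCFG (bits 28:29) = HALF_CFG (1), as FIFOCFG_W::half_cfg() would write.
    bits = (bits & !(0x03 << 28)) | (0x01 << 28);
    // Reading back, mirroring R::src_per() and R::sod().
    let src_per = (bits & 0x0f) as u8;
    let sod_enabled = ((bits >> 16) & 0x01) != 0;
    assert_eq!(src_per, 3);
    assert!(!sod_enabled);
    println!("CFG1 = {:#010x}", bits);
}
```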
waitingproof.go
|
package channeldb
import (
"encoding/binary"
"sync"
"io"
"bytes"
"github.com/go-errors/errors"
"github.com/ltcsuite/lnd/channeldb/kvdb"
"github.com/ltcsuite/lnd/lnwire"
)
var (
// waitingProofsBucketKey byte string name of the waiting proofs store.
waitingProofsBucketKey = []byte("waitingproofs")
// ErrWaitingProofNotFound is returned if the waiting proof hasn't been
// found in the db.
ErrWaitingProofNotFound = errors.New("waiting proofs haven't been " +
"found")
|
ErrWaitingProofAlreadyExist = errors.New("waiting proof with such " +
"key already exist")
)
// WaitingProofStore is the bolt db map-like storage for half announcement
// signatures. The one responsibility of this storage is to be able to
// retrieve waiting proofs after a client restart.
type WaitingProofStore struct {
// cache is used to reduce the number of redundant get calls when
// the object isn't stored in it.
cache map[WaitingProofKey]struct{}
db *DB
mu sync.RWMutex
}
// NewWaitingProofStore creates a new instance of the proof storage.
func NewWaitingProofStore(db *DB) (*WaitingProofStore, error) {
s := &WaitingProofStore{
db: db,
cache: make(map[WaitingProofKey]struct{}),
}
if err := s.ForAll(func(proof *WaitingProof) error {
s.cache[proof.Key()] = struct{}{}
return nil
}); err != nil && err != ErrWaitingProofNotFound {
return nil, err
}
return s, nil
}
// Add adds a new waiting proof to the storage.
func (s *WaitingProofStore) Add(proof *WaitingProof) error {
s.mu.Lock()
defer s.mu.Unlock()
err := kvdb.Update(s.db, func(tx kvdb.RwTx) error {
var err error
var b bytes.Buffer
// Get or create the bucket.
bucket, err := tx.CreateTopLevelBucket(waitingProofsBucketKey)
if err != nil {
return err
}
// Encode the object and place it in the bucket.
if err := proof.Encode(&b); err != nil {
return err
}
key := proof.Key()
return bucket.Put(key[:], b.Bytes())
})
if err != nil {
return err
}
// Knowing that the write succeeded, we can now update the in-memory
// cache with the proof's key.
s.cache[proof.Key()] = struct{}{}
return nil
}
// Remove removes the proof from storage by its key.
func (s *WaitingProofStore) Remove(key WaitingProofKey) error {
s.mu.Lock()
defer s.mu.Unlock()
if _, ok := s.cache[key]; !ok {
return ErrWaitingProofNotFound
}
err := kvdb.Update(s.db, func(tx kvdb.RwTx) error {
// Get or create the top bucket.
bucket := tx.ReadWriteBucket(waitingProofsBucketKey)
if bucket == nil {
return ErrWaitingProofNotFound
}
return bucket.Delete(key[:])
})
if err != nil {
return err
}
// Since the proof was successfully deleted from the store, we can now
// remove it from the in-memory cache.
delete(s.cache, key)
return nil
}
// ForAll iterates through all waiting proofs, passing each waiting proof
// to the given callback.
func (s *WaitingProofStore) ForAll(cb func(*WaitingProof) error) error {
return kvdb.View(s.db, func(tx kvdb.RTx) error {
bucket := tx.ReadBucket(waitingProofsBucketKey)
if bucket == nil {
return ErrWaitingProofNotFound
}
// Iterate over objects buckets.
return bucket.ForEach(func(k, v []byte) error {
// Skip nested buckets.
if v == nil {
return nil
}
r := bytes.NewReader(v)
proof := &WaitingProof{}
if err := proof.Decode(r); err != nil {
return err
}
return cb(proof)
})
})
}
// Get returns the waiting proof which corresponds to the given key.
func (s *WaitingProofStore) Get(key WaitingProofKey) (*WaitingProof, error) {
proof := &WaitingProof{}
s.mu.RLock()
defer s.mu.RUnlock()
if _, ok := s.cache[key]; !ok {
return nil, ErrWaitingProofNotFound
}
err := kvdb.View(s.db, func(tx kvdb.RTx) error {
bucket := tx.ReadBucket(waitingProofsBucketKey)
if bucket == nil {
return ErrWaitingProofNotFound
}
// Fetch the encoded proof directly by its key.
v := bucket.Get(key[:])
if v == nil {
return ErrWaitingProofNotFound
}
r := bytes.NewReader(v)
return proof.Decode(r)
})
return proof, err
}
// WaitingProofKey is the proof key which uniquely identifies the waiting
// proof object. The goal of this key is to distinguish the local and remote
// proofs for the same channel id.
type WaitingProofKey [9]byte
// WaitingProof is the storable object, which encapsulates the half proof and
// the information about which side this proof came from. This structure is
// needed to make the channel proof exchange persistent, so that after a client
// restart we may receive the remote/local half proof and process it.
type WaitingProof struct {
*lnwire.AnnounceSignatures
isRemote bool
}
// NewWaitingProof constructs a new waiting proof instance.
func NewWaitingProof(isRemote bool, proof *lnwire.AnnounceSignatures) *WaitingProof {
return &WaitingProof{
AnnounceSignatures: proof,
isRemote: isRemote,
}
}
// OppositeKey returns the key which uniquely identifies the opposite waiting proof.
func (p *WaitingProof) OppositeKey() WaitingProofKey {
var key [9]byte
binary.BigEndian.PutUint64(key[:8], p.ShortChannelID.ToUint64())
if !p.isRemote {
key[8] = 1
}
return key
}
// Key returns the key which uniquely identifies the waiting proof.
func (p *WaitingProof) Key() WaitingProofKey {
var key [9]byte
binary.BigEndian.PutUint64(key[:8], p.ShortChannelID.ToUint64())
if p.isRemote {
key[8] = 1
}
return key
}
// Encode writes the internal representation of the waiting proof to the byte stream.
func (p *WaitingProof) Encode(w io.Writer) error {
if err := binary.Write(w, byteOrder, p.isRemote); err != nil {
return err
}
if err := p.AnnounceSignatures.Encode(w, 0); err != nil {
return err
}
return nil
}
// Decode reads the data from the byte stream and initializes the
// waiting proof object with it.
func (p *WaitingProof) Decode(r io.Reader) error {
if err := binary.Read(r, byteOrder, &p.isRemote); err != nil {
return err
}
msg := &lnwire.AnnounceSignatures{}
if err := msg.Decode(r, 0); err != nil {
return err
}
(*p).AnnounceSignatures = msg
return nil
}
|
// ErrWaitingProofAlreadyExist is returned if a waiting proof with the
// same key already exists in the db.
|
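The 9-byte layout built by `Key()` and `OppositeKey()` above (an 8-byte big-endian short channel id followed by a one-byte is-remote flag) is the part of waitingproof.go that is easiest to misread. Here is a standalone sketch of the same layout, written in Rust so it runs on its own; the `waiting_proof_key` function name is illustrative and not part of the Go package.

```rust
fn waiting_proof_key(short_channel_id: u64, is_remote: bool) -> [u8; 9] {
    let mut key = [0u8; 9];
    // Bytes 0..8: the short channel id, big-endian (binary.BigEndian.PutUint64).
    key[..8].copy_from_slice(&short_channel_id.to_be_bytes());
    // Byte 8: 1 for the remote proof, 0 for the local one (flipped in OppositeKey).
    key[8] = if is_remote { 1 } else { 0 };
    key
}

fn main() {
    let local = waiting_proof_key(0x0102_0304_0506_0708, false);
    let remote = waiting_proof_key(0x0102_0304_0506_0708, true);
    assert_eq!(&local[..8], &remote[..8]); // same channel, different flag byte
    assert_eq!(local[8], 0);
    assert_eq!(remote[8], 1);
}
```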
txstatus.rs
|
#[doc = "Reader of register TXSTATUS"]
pub type R = crate::R<u32, super::TXSTATUS>;
#[doc = "Status of data in register TXDATA\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TXSTATUS_A {
#[doc = "0: No data pending in register TXDATA"]
|
NODATAPENDING,
#[doc = "1: Data pending in register TXDATA"]
DATAPENDING,
}
impl From<TXSTATUS_A> for bool {
#[inline(always)]
fn from(variant: TXSTATUS_A) -> Self {
match variant {
TXSTATUS_A::NODATAPENDING => false,
TXSTATUS_A::DATAPENDING => true,
}
}
}
#[doc = "Reader of field `TXSTATUS`"]
pub type TXSTATUS_R = crate::R<bool, TXSTATUS_A>;
impl TXSTATUS_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TXSTATUS_A {
match self.bits {
false => TXSTATUS_A::NODATAPENDING,
true => TXSTATUS_A::DATAPENDING,
}
}
#[doc = "Checks if the value of the field is `NODATAPENDING`"]
#[inline(always)]
pub fn is_no_data_pending(&self) -> bool {
*self == TXSTATUS_A::NODATAPENDING
}
#[doc = "Checks if the value of the field is `DATAPENDING`"]
#[inline(always)]
pub fn is_data_pending(&self) -> bool {
*self == TXSTATUS_A::DATAPENDING
}
}
impl R {
#[doc = "Bit 0 - Status of data in register TXDATA"]
#[inline(always)]
pub fn txstatus(&self) -> TXSTATUS_R {
TXSTATUS_R::new((self.bits & 0x01) != 0)
}
}
| |
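txstatus.rs is read-only, so the generated code amounts to mapping bit 0 of the register onto the two TXSTATUS_A variants. A minimal, PAC-free re-creation of that mapping follows; the `TxStatus` enum and `txstatus` function names are illustrative, not part of the generated API.

```rust
#[derive(Debug, PartialEq)]
enum TxStatus {
    NoDataPending, // bit 0 == 0: no data pending in TXDATA
    DataPending,   // bit 0 == 1: data pending in TXDATA
}

// Mirrors R::txstatus() followed by TXSTATUS_R::variant().
fn txstatus(raw: u32) -> TxStatus {
    if (raw & 0x01) != 0 {
        TxStatus::DataPending
    } else {
        TxStatus::NoDataPending
    }
}

fn main() {
    assert_eq!(txstatus(0x0000_0001), TxStatus::DataPending);
    assert_eq!(txstatus(0x0000_0000), TxStatus::NoDataPending);
}
```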
set_cen_inter_region_bandwidth_limit.go
|
package cbn
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// SetCenInterRegionBandwidthLimit invokes the cbn.SetCenInterRegionBandwidthLimit API synchronously
func (client *Client) SetCenInterRegionBandwidthLimit(request *SetCenInterRegionBandwidthLimitRequest) (response *SetCenInterRegionBandwidthLimitResponse, err error) {
response = CreateSetCenInterRegionBandwidthLimitResponse()
err = client.DoAction(request, response)
return
}
// SetCenInterRegionBandwidthLimitWithChan invokes the cbn.SetCenInterRegionBandwidthLimit API asynchronously
func (client *Client) SetCenInterRegionBandwidthLimitWithChan(request *SetCenInterRegionBandwidthLimitRequest) (<-chan *SetCenInterRegionBandwidthLimitResponse, <-chan error) {
responseChan := make(chan *SetCenInterRegionBandwidthLimitResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.SetCenInterRegionBandwidthLimit(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
// SetCenInterRegionBandwidthLimitWithCallback invokes the cbn.SetCenInterRegionBandwidthLimit API asynchronously
func (client *Client) SetCenInterRegionBandwidthLimitWithCallback(request *SetCenInterRegionBandwidthLimitRequest, callback func(response *SetCenInterRegionBandwidthLimitResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *SetCenInterRegionBandwidthLimitResponse
var err error
defer close(result)
response, err = client.SetCenInterRegionBandwidthLimit(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
|
// SetCenInterRegionBandwidthLimitRequest is the request struct for api SetCenInterRegionBandwidthLimit
type SetCenInterRegionBandwidthLimitRequest struct {
*requests.RpcRequest
ResourceOwnerId requests.Integer `position:"Query" name:"ResourceOwnerId"`
CenId string `position:"Query" name:"CenId"`
BandwidthPackageId string `position:"Query" name:"BandwidthPackageId"`
ResourceOwnerAccount string `position:"Query" name:"ResourceOwnerAccount"`
OwnerAccount string `position:"Query" name:"OwnerAccount"`
OppositeRegionId string `position:"Query" name:"OppositeRegionId"`
OwnerId requests.Integer `position:"Query" name:"OwnerId"`
LocalRegionId string `position:"Query" name:"LocalRegionId"`
BandwidthLimit requests.Integer `position:"Query" name:"BandwidthLimit"`
}
// SetCenInterRegionBandwidthLimitResponse is the response struct for api SetCenInterRegionBandwidthLimit
type SetCenInterRegionBandwidthLimitResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
}
// CreateSetCenInterRegionBandwidthLimitRequest creates a request to invoke SetCenInterRegionBandwidthLimit API
func CreateSetCenInterRegionBandwidthLimitRequest() (request *SetCenInterRegionBandwidthLimitRequest) {
request = &SetCenInterRegionBandwidthLimitRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("Cbn", "2017-09-12", "SetCenInterRegionBandwidthLimit", "cbn", "openAPI")
request.Method = requests.POST
return
}
// CreateSetCenInterRegionBandwidthLimitResponse creates a response to parse from SetCenInterRegionBandwidthLimit response
func CreateSetCenInterRegionBandwidthLimitResponse() (response *SetCenInterRegionBandwidthLimitResponse) {
response = &SetCenInterRegionBandwidthLimitResponse{
BaseResponse: &responses.BaseResponse{},
}
return
}
|
}
|
box3d.rs
|
// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::UnknownUnit;
use scale::Scale;
use num::*;
use point::{Point3D, point3};
use vector::Vector3D;
use size::Size3D;
use approxord::{min, max};
use nonempty::NonEmpty;
use num_traits::NumCast;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use core::borrow::Borrow;
use core::cmp::PartialOrd;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::ops::{Add, Div, Mul, Sub};
/// An axis aligned 3D box represented by its minimum and maximum coordinates.
#[repr(C)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'de>")))]
pub struct Box3D<T, U> {
pub min: Point3D<T, U>,
pub max: Point3D<T, U>,
}
impl<T: Hash, U> Hash for Box3D<T, U> {
fn hash<H: Hasher>(&self, h: &mut H) {
self.min.hash(h);
self.max.hash(h);
}
}
impl<T: Copy, U> Copy for Box3D<T, U> {}
impl<T: Copy, U> Clone for Box3D<T, U> {
fn clone(&self) -> Self {
*self
}
}
impl<T: PartialEq, U> PartialEq<Box3D<T, U>> for Box3D<T, U> {
fn eq(&self, other: &Self) -> bool {
self.min.eq(&other.min) && self.max.eq(&other.max)
}
}
impl<T: Eq, U> Eq for Box3D<T, U> {}
impl<T: fmt::Debug, U> fmt::Debug for Box3D<T, U> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Box3D({:?}, {:?})", self.min, self.max)
}
}
impl<T: fmt::Display, U> fmt::Display for Box3D<T, U> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "Box3D({}, {})", self.min, self.max)
}
}
impl<T, U> Box3D<T, U> {
/// Constructor.
pub const fn new(min: Point3D<T, U>, max: Point3D<T, U>) -> Self {
Box3D {
min,
max,
}
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + Zero + PartialOrd,
{
/// Creates a Box3D of the given size, at offset zero.
#[inline]
pub fn from_size(size: Size3D<T, U>) -> Self {
let zero = Point3D::zero();
let point = size.to_vector().to_point();
Box3D::from_points(&[zero, point])
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + PartialOrd,
{
/// Returns true if the box has a negative volume.
///
/// The common interpretation for a negative box is to consider it empty. It can be obtained
/// by calculating the intersection of two boxes that do not intersect.
#[inline]
pub fn is_negative(&self) -> bool {
self.max.x < self.min.x || self.max.y < self.min.y || self.max.z < self.min.z
}
/// Returns true if the size is zero or negative.
#[inline]
pub fn is_empty_or_negative(&self) -> bool {
self.max.x <= self.min.x || self.max.y <= self.min.y || self.max.z <= self.min.z
}
#[inline]
pub fn to_non_empty(&self) -> Option<NonEmpty<Self>> {
if self.is_empty_or_negative() {
return None;
}
Some(NonEmpty(*self))
}
#[inline]
pub fn intersects(&self, other: &Self) -> bool {
self.min.x < other.max.x
&& self.max.x > other.min.x
&& self.min.y < other.max.y
&& self.max.y > other.min.y
&& self.min.z < other.max.z
&& self.max.z > other.min.z
}
#[inline]
pub fn try_intersection(&self, other: &Self) -> Option<NonEmpty<Self>> {
if !self.intersects(other) {
return None;
}
Some(NonEmpty(self.intersection(other)))
}
pub fn intersection(&self, other: &Self) -> Self {
let intersection_min = Point3D::new(
max(self.min.x, other.min.x),
max(self.min.y, other.min.y),
max(self.min.z, other.min.z),
);
let intersection_max = Point3D::new(
min(self.max.x, other.max.x),
min(self.max.y, other.max.y),
min(self.max.z, other.max.z),
);
Box3D::new(
intersection_min,
intersection_max,
)
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + Add<T, Output = T>,
{
/// Returns the same box3d, translated by a vector.
#[inline]
#[must_use]
pub fn translate(&self, by: Vector3D<T, U>) -> Self {
Box3D {
min: self.min + by,
max: self.max + by,
}
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + PartialOrd + Zero,
{
/// Returns true if this box3d contains the point. Points are considered
/// in the box3d if they are on the front, left or top faces, but outside if they
/// are on the back, right or bottom faces.
#[inline]
pub fn contains(&self, other: Point3D<T, U>) -> bool {
self.min.x <= other.x && other.x < self.max.x
&& self.min.y <= other.y && other.y < self.max.y
&& self.min.z <= other.z && other.z < self.max.z
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + PartialOrd + Zero + Sub<T, Output = T>,
{
/// Returns true if this box3d contains the interior of the other box3d. Always
/// returns true if other is empty, and always returns false if other is
/// nonempty but this box3d is empty.
#[inline]
pub fn contains_box(&self, other: &Self) -> bool {
other.is_empty_or_negative()
|| (self.min.x <= other.min.x && other.max.x <= self.max.x
&& self.min.y <= other.min.y && other.max.y <= self.max.y
&& self.min.z <= other.min.z && other.max.z <= self.max.z)
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + Sub<T, Output = T>,
{
#[inline]
pub fn size(&self)-> Size3D<T, U> {
Size3D::new(
self.max.x - self.min.x,
self.max.y - self.min.y,
self.max.z - self.min.z,
)
}
#[inline]
pub fn width(&self) -> T {
self.max.x - self.min.x
}
#[inline]
pub fn height(&self) -> T {
self.max.y - self.min.y
}
#[inline]
pub fn depth(&self) -> T {
self.max.z - self.min.z
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + PartialEq + Add<T, Output = T> + Sub<T, Output = T>,
{
/// Inflates the box by the specified sizes on each side respectively.
#[inline]
#[must_use]
pub fn inflate(&self, width: T, height: T, depth: T) -> Self {
Box3D::new(
Point3D::new(self.min.x - width, self.min.y - height, self.min.z - depth),
Point3D::new(self.max.x + width, self.max.y + height, self.max.z + depth),
)
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + Zero + PartialOrd,
{
/// Returns the smallest box containing all of the provided points.
pub fn from_points<I>(points: I) -> Self
where
I: IntoIterator,
I::Item: Borrow<Point3D<T, U>>,
{
let mut points = points.into_iter();
let (mut min_x, mut min_y, mut min_z) = match points.next() {
Some(first) => (first.borrow().x, first.borrow().y, first.borrow().z),
None => return Box3D::zero(),
};
let (mut max_x, mut max_y, mut max_z) = (min_x, min_y, min_z);
for point in points {
let p = point.borrow();
if p.x < min_x {
min_x = p.x
}
if p.x > max_x {
max_x = p.x
}
if p.y < min_y {
min_y = p.y
}
if p.y > max_y {
max_y = p.y
}
if p.z < min_z {
min_z = p.z
}
if p.z > max_z {
max_z = p.z
}
}
Box3D {
min: point3(min_x, min_y, min_z),
max: point3(max_x, max_y, max_z),
}
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + One + Add<Output = T> + Sub<Output = T> + Mul<Output = T>,
{
/// Linearly interpolate between this box3d and another box3d.
///
/// `t` is expected to be between zero and one.
#[inline]
pub fn lerp(&self, other: Self, t: T) -> Self {
Self::new(
self.min.lerp(other.min, t),
self.max.lerp(other.max, t),
)
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + One + Add<Output = T> + Div<Output = T>,
{
pub fn center(&self) -> Point3D<T, U> {
let two = T::one() + T::one();
(self.min + self.max.to_vector()) / two
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + Clone + PartialOrd + Add<T, Output = T> + Sub<T, Output = T> + Zero,
{
#[inline]
pub fn union(&self, other: &Self) -> Self {
Box3D::new(
Point3D::new(
min(self.min.x, other.min.x),
min(self.min.y, other.min.y),
min(self.min.z, other.min.z),
),
Point3D::new(
max(self.max.x, other.max.x),
max(self.max.y, other.max.y),
max(self.max.z, other.max.z),
),
)
}
}
impl<T, U> Box3D<T, U>
where
T: Copy,
{
#[inline]
pub fn scale<S: Copy>(&self, x: S, y: S, z: S) -> Self
where
T: Mul<S, Output = T>
{
Box3D::new(
Point3D::new(self.min.x * x, self.min.y * y, self.min.z * z),
Point3D::new(self.max.x * x, self.max.y * y, self.max.z * z),
)
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + Mul<T, Output = T> + Sub<T, Output = T>,
{
#[inline]
pub fn volume(&self) -> T {
let size = self.size();
size.width * size.height * size.depth
}
#[inline]
pub fn xy_area(&self) -> T {
let size = self.size();
size.width * size.height
}
#[inline]
pub fn yz_area(&self) -> T {
let size = self.size();
size.depth * size.height
}
#[inline]
pub fn xz_area(&self) -> T {
let size = self.size();
size.depth * size.width
}
}
impl<T, U> Box3D<T, U>
where
T: Copy + Zero,
{
/// Constructor, setting all sides to zero.
pub fn zero() -> Self {
Box3D::new(Point3D::zero(), Point3D::zero())
}
}
impl<T, U> Box3D<T, U>
where
T: PartialEq,
{
/// Returns true if the volume is zero.
#[inline]
pub fn is_empty(&self) -> bool {
self.min.x == self.max.x || self.min.y == self.max.y || self.min.z == self.max.z
}
}
impl<T, U> Mul<T> for Box3D<T, U>
where
T: Copy + Mul<T, Output = T>,
{
type Output = Self;
#[inline]
fn mul(self, scale: T) -> Self {
Box3D::new(self.min * scale, self.max * scale)
}
}
impl<T, U> Div<T> for Box3D<T, U>
where
T: Copy + Div<T, Output = T>,
{
type Output = Self;
#[inline]
fn
|
(self, scale: T) -> Self {
Box3D::new(self.min / scale, self.max / scale)
}
}
impl<T, U1, U2> Mul<Scale<T, U1, U2>> for Box3D<T, U1>
where
T: Copy + Mul<T, Output = T>,
{
type Output = Box3D<T, U2>;
#[inline]
fn mul(self, scale: Scale<T, U1, U2>) -> Box3D<T, U2> {
Box3D::new(self.min * scale, self.max * scale)
}
}
impl<T, U1, U2> Div<Scale<T, U1, U2>> for Box3D<T, U2>
where
T: Copy + Div<T, Output = T>,
{
type Output = Box3D<T, U1>;
#[inline]
fn div(self, scale: Scale<T, U1, U2>) -> Box3D<T, U1> {
Box3D::new(self.min / scale, self.max / scale)
}
}
impl<T, Unit> Box3D<T, Unit>
where
T: Copy,
{
/// Drop the units, preserving only the numeric value.
#[inline]
pub fn to_untyped(&self) -> Box3D<T, UnknownUnit> {
Box3D {
min: self.min.to_untyped(),
max: self.max.to_untyped(),
}
}
/// Tag a unitless value with units.
#[inline]
pub fn from_untyped(c: &Box3D<T, UnknownUnit>) -> Box3D<T, Unit> {
Box3D {
min: Point3D::from_untyped(c.min),
max: Point3D::from_untyped(c.max),
}
}
/// Cast the unit
pub fn cast_unit<V>(&self) -> Box3D<T, V> {
Box3D::new(self.min.cast_unit(), self.max.cast_unit())
}
}
impl<T0, Unit> Box3D<T0, Unit>
where
T0: NumCast + Copy,
{
/// Cast from one numeric representation to another, preserving the units.
///
/// When casting from floating point to integer coordinates, the decimals are truncated
/// as one would expect from a simple cast, but this behavior does not always make sense
/// geometrically. Consider using round(), round_in() or round_out() before casting.
pub fn cast<T1: NumCast + Copy>(&self) -> Box3D<T1, Unit> {
Box3D::new(
self.min.cast(),
self.max.cast(),
)
}
/// Fallible cast from one numeric representation to another, preserving the units.
///
/// When casting from floating point to integer coordinates, the decimals are truncated
/// as one would expect from a simple cast, but this behavior does not always make sense
/// geometrically. Consider using round(), round_in() or round_out() before casting.
pub fn try_cast<T1: NumCast + Copy>(&self) -> Option<Box3D<T1, Unit>> {
match (self.min.try_cast(), self.max.try_cast()) {
(Some(a), Some(b)) => Some(Box3D::new(a, b)),
_ => None,
}
}
}
impl<T, U> Box3D<T, U>
where
T: Round,
{
/// Return a box3d with edges rounded to integer coordinates, such that
/// the returned box3d has the same set of pixel centers as the original
/// one.
/// Values equal to 0.5 round up.
/// Suitable for most places where integral device coordinates
/// are needed, but note that any translation should be applied first to
/// avoid pixel rounding errors.
/// Note that this is *not* rounding to the nearest integer if the values are negative.
/// They always round as floor(n + 0.5).
#[must_use]
pub fn round(&self) -> Self {
Box3D::new(self.min.round(), self.max.round())
}
}
impl<T, U> Box3D<T, U>
where
T: Floor + Ceil,
{
/// Return a box3d with faces/edges rounded to integer coordinates, such that
/// the original box3d contains the resulting box3d.
#[must_use]
pub fn round_in(&self) -> Self {
Box3D {
min: self.min.ceil(),
max: self.max.floor(),
}
}
/// Return a box3d with faces/edges rounded to integer coordinates, such that
/// the original box3d is contained in the resulting box3d.
#[must_use]
pub fn round_out(&self) -> Self {
Box3D {
min: self.min.floor(),
max: self.max.ceil(),
}
}
}
// Convenience functions for common casts
impl<T: NumCast + Copy, Unit> Box3D<T, Unit> {
/// Cast into an `f32` box3d.
pub fn to_f32(&self) -> Box3D<f32, Unit> {
self.cast()
}
/// Cast into an `f64` box3d.
pub fn to_f64(&self) -> Box3D<f64, Unit> {
self.cast()
}
/// Cast into an `usize` box3d, truncating decimals if any.
///
/// When casting from floating point cuboids, it is worth considering whether
/// to `round()`, `round_in()` or `round_out()` before the cast in order to
/// obtain the desired conversion behavior.
pub fn to_usize(&self) -> Box3D<usize, Unit> {
self.cast()
}
/// Cast into an `u32` box3d, truncating decimals if any.
///
/// When casting from floating point cuboids, it is worth considering whether
/// to `round()`, `round_in()` or `round_out()` before the cast in order to
/// obtain the desired conversion behavior.
pub fn to_u32(&self) -> Box3D<u32, Unit> {
self.cast()
}
/// Cast into an `i32` box3d, truncating decimals if any.
///
/// When casting from floating point cuboids, it is worth considering whether
/// to `round()`, `round_in()` or `round_out()` before the cast in order to
/// obtain the desired conversion behavior.
pub fn to_i32(&self) -> Box3D<i32, Unit> {
self.cast()
}
/// Cast into an `i64` box3d, truncating decimals if any.
///
/// When casting from floating point cuboids, it is worth considering whether
/// to `round()`, `round_in()` or `round_out()` before the cast in order to
/// obtain the desired conversion behavior.
pub fn to_i64(&self) -> Box3D<i64, Unit> {
self.cast()
}
}
impl<T, U> From<Size3D<T, U>> for Box3D<T, U>
where
T: Copy + Zero + PartialOrd,
{
fn from(b: Size3D<T, U>) -> Self {
Self::from_size(b)
}
}
/// Shorthand for `Box3D::new(Point3D::new(x1, y1, z1), Point3D::new(x2, y2, z2))`.
pub fn box3d<T: Copy, U>(min_x: T, min_y: T, min_z: T, max_x: T, max_y: T, max_z: T) -> Box3D<T, U> {
Box3D::new(Point3D::new(min_x, min_y, min_z), Point3D::new(max_x, max_y, max_z))
}
#[cfg(test)]
mod tests {
use {point3, size3, vec3};
use default::{Box3D, Point3D};
#[test]
fn test_new() {
let b = Box3D::new(point3(-1.0, -1.0, -1.0), point3(1.0, 1.0, 1.0));
assert!(b.min.x == -1.0);
assert!(b.min.y == -1.0);
assert!(b.min.z == -1.0);
assert!(b.max.x == 1.0);
assert!(b.max.y == 1.0);
assert!(b.max.z == 1.0);
}
#[test]
fn test_size() {
let b = Box3D::new(point3(-10.0, -10.0, -10.0), point3(10.0, 10.0, 10.0));
assert!(b.size().width == 20.0);
assert!(b.size().height == 20.0);
assert!(b.size().depth == 20.0);
}
#[test]
fn test_width_height_depth() {
let b = Box3D::new(point3(-10.0, -10.0, -10.0), point3(10.0, 10.0, 10.0));
assert!(b.width() == 20.0);
assert!(b.height() == 20.0);
assert!(b.depth() == 20.0);
}
#[test]
fn test_center() {
let b = Box3D::new(point3(-10.0, -10.0, -10.0), point3(10.0, 10.0, 10.0));
assert!(b.center() == Point3D::zero());
}
#[test]
fn test_volume() {
let b = Box3D::new(point3(-10.0, -10.0, -10.0), point3(10.0, 10.0, 10.0));
assert!(b.volume() == 8000.0);
}
#[test]
fn test_area() {
let b = Box3D::new(point3(-10.0, -10.0, -10.0), point3(10.0, 10.0, 10.0));
assert!(b.xy_area() == 400.0);
assert!(b.yz_area() == 400.0);
assert!(b.xz_area() == 400.0);
}
#[test]
fn test_from_points() {
let b = Box3D::from_points(&[point3(50.0, 160.0, 12.5), point3(100.0, 25.0, 200.0)]);
assert!(b.min == point3(50.0, 25.0, 12.5));
assert!(b.max == point3(100.0, 160.0, 200.0));
}
#[test]
fn test_min_max() {
let b = Box3D::from_points(&[point3(50.0, 25.0, 12.5), point3(100.0, 160.0, 200.0)]);
assert!(b.min.x == 50.0);
assert!(b.min.y == 25.0);
assert!(b.min.z == 12.5);
assert!(b.max.x == 100.0);
assert!(b.max.y == 160.0);
assert!(b.max.z == 200.0);
}
#[test]
fn test_round_in() {
let b = Box3D::from_points(&[point3(-25.5, -40.4, -70.9), point3(60.3, 36.5, 89.8)]).round_in();
assert!(b.min.x == -25.0);
assert!(b.min.y == -40.0);
assert!(b.min.z == -70.0);
assert!(b.max.x == 60.0);
assert!(b.max.y == 36.0);
assert!(b.max.z == 89.0);
}
#[test]
fn test_round_out() {
let b = Box3D::from_points(&[point3(-25.5, -40.4, -70.9), point3(60.3, 36.5, 89.8)]).round_out();
assert!(b.min.x == -26.0);
assert!(b.min.y == -41.0);
assert!(b.min.z == -71.0);
assert!(b.max.x == 61.0);
assert!(b.max.y == 37.0);
assert!(b.max.z == 90.0);
}
#[test]
fn test_round() {
let b = Box3D::from_points(&[point3(-25.5, -40.4, -70.9), point3(60.3, 36.5, 89.8)]).round();
assert!(b.min.x == -26.0);
assert!(b.min.y == -40.0);
assert!(b.min.z == -71.0);
assert!(b.max.x == 60.0);
assert!(b.max.y == 37.0);
assert!(b.max.z == 90.0);
}
#[test]
fn test_from_size() {
let b = Box3D::from_size(size3(30.0, 40.0, 50.0));
assert!(b.min == Point3D::zero());
assert!(b.size().width == 30.0);
assert!(b.size().height == 40.0);
assert!(b.size().depth == 50.0);
}
#[test]
fn test_translate() {
let size = size3(15.0, 15.0, 200.0);
let mut center = (size / 2.0).to_vector().to_point();
let b = Box3D::from_size(size);
assert!(b.center() == center);
let translation = vec3(10.0, 2.5, 9.5);
let b = b.translate(translation);
center += translation;
assert!(b.center() == center);
assert!(b.max.x == 25.0);
assert!(b.max.y == 17.5);
assert!(b.max.z == 209.5);
assert!(b.min.x == 10.0);
assert!(b.min.y == 2.5);
assert!(b.min.z == 9.5);
}
#[test]
fn test_union() {
let b1 = Box3D::from_points(&[point3(-20.0, -20.0, -20.0), point3(0.0, 20.0, 20.0)]);
let b2 = Box3D::from_points(&[point3(0.0, 20.0, 20.0), point3(20.0, -20.0, -20.0)]);
let b = b1.union(&b2);
assert!(b.max.x == 20.0);
assert!(b.max.y == 20.0);
assert!(b.max.z == 20.0);
assert!(b.min.x == -20.0);
assert!(b.min.y == -20.0);
assert!(b.min.z == -20.0);
assert!(b.volume() == (40.0 * 40.0 * 40.0));
}
#[test]
fn test_intersects() {
let b1 = Box3D::from_points(&[point3(-15.0, -20.0, -20.0), point3(10.0, 20.0, 20.0)]);
let b2 = Box3D::from_points(&[point3(-10.0, 20.0, 20.0), point3(15.0, -20.0, -20.0)]);
assert!(b1.intersects(&b2));
}
#[test]
fn test_intersection() {
let b1 = Box3D::from_points(&[point3(-15.0, -20.0, -20.0), point3(10.0, 20.0, 20.0)]);
let b2 = Box3D::from_points(&[point3(-10.0, 20.0, 20.0), point3(15.0, -20.0, -20.0)]);
let b = b1.intersection(&b2);
assert!(b.max.x == 10.0);
assert!(b.max.y == 20.0);
assert!(b.max.z == 20.0);
assert!(b.min.x == -10.0);
assert!(b.min.y == -20.0);
assert!(b.min.z == -20.0);
assert!(b.volume() == (20.0 * 40.0 * 40.0));
}
#[test]
fn test_try_intersection() {
let b1 = Box3D::from_points(&[point3(-15.0, -20.0, -20.0), point3(10.0, 20.0, 20.0)]);
let b2 = Box3D::from_points(&[point3(-10.0, 20.0, 20.0), point3(15.0, -20.0, -20.0)]);
assert!(b1.try_intersection(&b2).is_some());
let b1 = Box3D::from_points(&[point3(-15.0, -20.0, -20.0), point3(-10.0, 20.0, 20.0)]);
let b2 = Box3D::from_points(&[point3(10.0, 20.0, 20.0), point3(15.0, -20.0, -20.0)]);
assert!(b1.try_intersection(&b2).is_none());
}
#[test]
fn test_scale() {
let b = Box3D::from_points(&[point3(-10.0, -10.0, -10.0), point3(10.0, 10.0, 10.0)]);
let b = b.scale(0.5, 0.5, 0.5);
assert!(b.max.x == 5.0);
assert!(b.max.y == 5.0);
assert!(b.max.z == 5.0);
assert!(b.min.x == -5.0);
assert!(b.min.y == -5.0);
assert!(b.min.z == -5.0);
}
#[test]
fn test_zero() {
let b = Box3D::<f64>::zero();
assert!(b.max.x == 0.0);
assert!(b.max.y == 0.0);
assert!(b.max.z == 0.0);
assert!(b.min.x == 0.0);
assert!(b.min.y == 0.0);
assert!(b.min.z == 0.0);
}
#[test]
fn test_lerp() {
let b1 = Box3D::from_points(&[point3(-20.0, -20.0, -20.0), point3(-10.0, -10.0, -10.0)]);
let b2 = Box3D::from_points(&[point3(10.0, 10.0, 10.0), point3(20.0, 20.0, 20.0)]);
let b = b1.lerp(b2, 0.5);
assert!(b.center() == Point3D::zero());
assert!(b.size().width == 10.0);
assert!(b.size().height == 10.0);
assert!(b.size().depth == 10.0);
}
#[test]
fn test_contains() {
let b = Box3D::from_points(&[point3(-20.0, -20.0, -20.0), point3(20.0, 20.0, 20.0)]);
assert!(b.contains(point3(-15.3, 10.5, 18.4)));
}
#[test]
fn test_contains_box() {
let b1 = Box3D::from_points(&[point3(-20.0, -20.0, -20.0), point3(20.0, 20.0, 20.0)]);
let b2 = Box3D::from_points(&[point3(-14.3, -16.5, -19.3), point3(6.7, 17.6, 2.5)]);
assert!(b1.contains_box(&b2));
}
#[test]
fn test_inflate() {
let b = Box3D::from_points(&[point3(-20.0, -20.0, -20.0), point3(20.0, 20.0, 20.0)]);
let b = b.inflate(10.0, 5.0, 2.0);
assert!(b.size().width == 60.0);
assert!(b.size().height == 50.0);
assert!(b.size().depth == 44.0);
assert!(b.center() == Point3D::zero());
}
#[test]
fn test_is_empty() {
for i in 0..3 {
let mut coords_neg = [-20.0, -20.0, -20.0];
let mut coords_pos = [20.0, 20.0, 20.0];
coords_neg[i] = 0.0;
coords_pos[i] = 0.0;
let b = Box3D::from_points(&[Point3D::from(coords_neg), Point3D::from(coords_pos)]);
assert!(b.is_empty());
}
}
}
|
div
|
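As a quick orientation for box3d.rs, the sketch below exercises `intersects`, `intersection`, `union` and `volume` the same way the unit tests do. It assumes the API exactly as defined above (for example `intersection()` returning a plain `Box3D`); later euclid releases changed some of these signatures, so treat it as a sketch against this version only.

```rust
use euclid::default::Box3D;
use euclid::point3;

fn main() {
    let a = Box3D::from_points(&[point3(-15.0, -20.0, -20.0), point3(10.0, 20.0, 20.0)]);
    let b = Box3D::from_points(&[point3(-10.0, -20.0, -20.0), point3(15.0, 20.0, 20.0)]);

    assert!(a.intersects(&b));
    let i = a.intersection(&b); // overlap: [-10, 10] x [-20, 20] x [-20, 20]
    let u = a.union(&b);        // hull:    [-15, 15] x [-20, 20] x [-20, 20]

    assert_eq!(i.volume(), 20.0 * 40.0 * 40.0);
    assert_eq!(u.volume(), 30.0 * 40.0 * 40.0);
    println!("intersection = {:?}, union = {:?}", i, u);
}
```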
exporter.py
|
import argparse
import http.server
import logging
import sys
import prometheus_client
import xmrig_exporter
def main():
|
parser = argparse.ArgumentParser("Xmrig Exporter")
parser.add_argument("--port", type=int, default=9189)
parser.add_argument("--bind_address", default="0.0.0.0")
parser.add_argument("--url", required=True)
parser.add_argument("--token")
parser.add_argument("--verbose", "-v", action="count")
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.INFO
logging.basicConfig(stream=sys.stdout, level=level)
collector = xmrig_exporter.XmrigCollector(args.url, token=args.token)
prometheus_client.REGISTRY.register(collector)
handler = prometheus_client.MetricsHandler.factory(
prometheus_client.REGISTRY)
server = http.server.HTTPServer(
(args.bind_address, args.port), handler)
server.serve_forever()
|
|
_iwd.py
|
# Copyright 2019 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import math
from typing import Optional, TypeVar
import numpy as np
import tensorflow as tf
import torch
from fastestimator.backend._maximum import maximum
from fastestimator.backend._reduce_sum import reduce_sum
from fastestimator.backend._reshape import reshape
from fastestimator.backend._tensor_pow import tensor_pow
from fastestimator.backend._to_tensor import to_tensor
from fastestimator.util.util import TENSOR_TO_NP_DTYPE
Tensor = TypeVar('Tensor', tf.Tensor, torch.Tensor, np.ndarray)
def iwd(tensor: Tensor,
power: float = 1.0,
max_prob: float = 0.95,
pairwise_distance: float = 1.0,
eps: Optional[Tensor] = None) -> Tensor:
"""Compute the Inverse Weighted Distance from the given input.
This can be used as an activation function for the final layer of a neural network instead of softmax. For example,
instead of: model.add(layers.Dense(classes, activation='softmax')), you could use:
model.add(layers.Dense(classes, activation=lambda x: iwd(tf.nn.sigmoid(x))))
This method can be used with Numpy data:
```python
n = np.array([[0.5]*5, [0]+[1]*4])
b = fe.backend.iwd(n) # [[0.2, 0.2, 0.2, 0.2, 0.2], [0.95, 0.0125, 0.0125, 0.0125, 0.0125]]
```
This method can be used with TensorFlow tensors:
```python
t = tf.constant([[0.5]*5, [0]+[1]*4])
b = fe.backend.iwd(t) # [[0.2, 0.2, 0.2, 0.2, 0.2], [0.95, 0.0125, 0.0125, 0.0125, 0.0125]]
```
This method can be used with PyTorch tensors:
```python
p = torch.tensor([[0.5]*5, [0]+[1]*4])
|
tensor: The input value. Should be of shape (Batch, C) where every element in C corresponds to a (non-negative)
distance to a target class.
power: The power to raise the inverse distances to. 1.0 results in a fairly intuitive probability output. Larger
powers can widen regions of certainty, whereas values between 0 and 1 can widen regions of uncertainty.
max_prob: The maximum probability to assign to a class estimate when it is distance zero away from the target.
For numerical stability this must be less than 1.0. We have found that using smaller values like 0.95 can
lead to natural adversarial robustness.
pairwise_distance: The distance to any other class when the distance to a target class is zero. For example, if
you have a perfect match for class 'a', what distance should be reported to class 'b'. If you have a metric
where this isn't constant, just use an approximate expected distance. In that case `max_prob` will only give
you approximate control over the true maximum probability.
eps: The numeric stability constant to be used when d approaches zero. If None then it will be computed using
`max_prob` and `pairwise_distance`. If not None, then `max_prob` and `pairwise_distance` will be ignored.
Returns:
A probability distribution of shape (Batch, C) where smaller distances from `tensor` correspond to larger
probabilities.
"""
if eps is None:
eps = np.array(pairwise_distance * math.pow((1.0 - max_prob) / (max_prob * (tensor.shape[-1] - 1)), 1 / power),
dtype=TENSOR_TO_NP_DTYPE[tensor.dtype])
eps = to_tensor(
eps, target_type='torch' if isinstance(tensor, torch.Tensor) else 'tf' if tf.is_tensor(tensor) else 'np')
if isinstance(eps, torch.Tensor):
eps = eps.to("cuda:0" if torch.cuda.is_available() else "cpu")
tensor = maximum(tensor, eps)
tensor = tensor_pow(1.0 / tensor, power)
tensor = tensor / reshape(reduce_sum(tensor, axis=-1), shape=[-1, 1])
return tensor
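# Editor's note: a minimal NumPy-only sketch (not part of the original module) that
# reproduces the docstring example by hand, so the eps derivation above is concrete.
# Assumes the default arguments power=1.0, max_prob=0.95, pairwise_distance=1.0 and C=5.
import numpy as np

n_classes = 5
eps = 1.0 * ((1.0 - 0.95) / (0.95 * (n_classes - 1))) ** (1.0 / 1.0)  # = 1/76 ~ 0.01316
d = np.array([[0.5] * 5, [0.0] + [1.0] * 4])
d = np.maximum(d, eps)                      # clamp zero distances up to eps
w = (1.0 / d) ** 1.0                        # inverse distances raised to `power`
probs = w / w.sum(axis=1, keepdims=True)    # normalize each row into a distribution
# Row 0 -> [0.2]*5; row 1 -> [0.95, 0.0125, 0.0125, 0.0125, 0.0125]:
# 1/eps = 76, so 76 / (76 + 4*1) = 0.95 and 1 / 80 = 0.0125, matching the docstring.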
|
b = fe.backend.iwd(p) # [[0.2, 0.2, 0.2, 0.2, 0.2], [0.95, 0.0125, 0.0125, 0.0125, 0.0125]]
```
Args:
|
test_accessor_list_view.py
|
from unittest import mock
import pytest
from django.http import Http404
from know_me import serializers, views
def
|
(api_rf, km_user_accessor_factory, km_user_factory):
"""
The queryset for the view should include all accessors granting
access to the requesting user's Know Me user.
"""
km_user = km_user_factory()
api_rf.user = km_user.user
km_user_accessor_factory(km_user=km_user)
km_user_accessor_factory(km_user=km_user)
view = views.AccessorListView()
view.request = api_rf.get("/")
assert list(view.get_queryset()) == list(km_user.km_user_accessors.all())
def test_get_queryset_no_km_user(api_rf, user_factory):
"""
If the requesting user has no associated Know Me user, a 404 error
should be raised.
"""
user = user_factory()
api_rf.user = user
view = views.AccessorListView()
view.request = api_rf.get("/")
with pytest.raises(Http404):
view.get_queryset()
def test_get_serializer_class():
"""
Test the serializer class used by the view.
"""
view = views.AccessorListView()
assert view.get_serializer_class() == serializers.KMUserAccessorSerializer
def test_perform_create(api_rf, km_user_factory):
"""
If the requesting user has an associated Know Me user, that Know Me
user should be passed to the serializer being saved.
"""
km_user = km_user_factory()
api_rf.user = km_user.user
serializer = mock.Mock(name="Mock Serializer")
view = views.AccessorListView()
view.request = api_rf.post("/")
view.perform_create(serializer)
assert serializer.save.call_args[1] == {"km_user": km_user}
def test_perform_create_no_km_user(api_rf, user_factory):
"""
If the requesting user does not have an associated Know Me user, the
method should throw a 404 exception.
"""
user = user_factory()
api_rf.user = user
serializer = mock.Mock(name="Mock Serializer")
view = views.AccessorListView()
view.request = api_rf.post("/")
with pytest.raises(Http404):
view.perform_create(serializer)
|
test_get_queryset
|
resolver.go
|
package healthcheck
import (
"context"
"github.com/kyma-incubator/compass/components/director/pkg/graphql"
)
// HealthCheckService missing godoc
//go:generate mockery --name=HealthCheckService --output=automock --outpkg=automock --case=underscore
type HealthCheckService interface{}
// HealthCheckConverter missing godoc
//go:generate mockery --name=HealthCheckConverter --output=automock --outpkg=automock --case=underscore
type HealthCheckConverter interface{}
// Resolver missing godoc
type Resolver struct {
svc HealthCheckService
converter HealthCheckConverter
}
// NewResolver missing godoc
func NewResolver(svc HealthCheckService) *Resolver
|
// HealthChecks missing godoc
func (r *Resolver) HealthChecks(ctx context.Context, types []graphql.HealthCheckType, origin *string, first *int, after *graphql.PageCursor) (*graphql.HealthCheckPage, error) {
return &graphql.HealthCheckPage{
Data: []*graphql.HealthCheck{},
PageInfo: &graphql.PageInfo{
HasNextPage: false,
EndCursor: "",
StartCursor: "",
},
TotalCount: 0,
}, nil
}
|
{
return &Resolver{
svc: svc,
converter: &converter{},
}
}
|
electionService.js
|
module.exports = [
'$http',
function ($http) {
|
this.getActiveElection = function (accessCode) {
return $http.get(`/api/election/active?accessCode=${accessCode}`);
};
},
];
| |
bloom.py
|
# Copyright (C) 2013-2014 The python-bitcoinlib developers
#
# This file is part of python-bitcoinlib.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-bitcoinlib, including this file, may be copied, modified,
# propagated, or distributed except according to the terms contained in the
# LICENSE file.
"""Bloom filter support"""
from __future__ import absolute_import, division, print_function, unicode_literals
import struct
import sys
import math
import bitcoin.core
import bitcoin.core.serialize
def _ROTL32(x, r):
assert x <= 0xFFFFFFFF
return ((x << r) & 0xFFFFFFFF) | (x >> (32 - r))
def MurmurHash3(nHashSeed, vDataToHash):
"""MurmurHash3 (x86_32)
Used for bloom filters. See http://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp
"""
assert nHashSeed <= 0xFFFFFFFF
h1 = nHashSeed
c1 = 0xcc9e2d51
c2 = 0x1b873593
# body
i = 0
while (i < len(vDataToHash) - len(vDataToHash) % 4
and len(vDataToHash) - i >= 4):
k1 = struct.unpack(b"<L", vDataToHash[i:i+4])[0]
k1 = (k1 * c1) & 0xFFFFFFFF
k1 = _ROTL32(k1, 15)
k1 = (k1 * c2) & 0xFFFFFFFF
h1 ^= k1
h1 = _ROTL32(h1, 13)
h1 = (((h1*5) & 0xFFFFFFFF) + 0xe6546b64) & 0xFFFFFFFF
i += 4
# tail
k1 = 0
j = (len(vDataToHash) // 4) * 4
bord = ord
if sys.version > '3':
# In Py3 indexing bytes returns numbers, not characters
bord = lambda x: x
if len(vDataToHash) & 3 >= 3:
k1 ^= bord(vDataToHash[j+2]) << 16
if len(vDataToHash) & 3 >= 2:
k1 ^= bord(vDataToHash[j+1]) << 8
if len(vDataToHash) & 3 >= 1:
k1 ^= bord(vDataToHash[j])
k1 &= 0xFFFFFFFF
k1 = (k1 * c1) & 0xFFFFFFFF
k1 = _ROTL32(k1, 15)
k1 = (k1 * c2) & 0xFFFFFFFF
h1 ^= k1
# finalization
h1 ^= len(vDataToHash) & 0xFFFFFFFF
h1 ^= (h1 & 0xFFFFFFFF) >> 16
h1 *= 0x85ebca6b
h1 ^= (h1 & 0xFFFFFFFF) >> 13
h1 *= 0xc2b2ae35
h1 ^= (h1 & 0xFFFFFFFF) >> 16
return h1 & 0xFFFFFFFF
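# Editor's note: a small smoke test sketched in by the editor (not part of the original
# python-bitcoinlib file). With nHashSeed=0 and empty input, the body and tail loops
# above contribute nothing, so the finalization of h1=0 stays 0.
def _murmur3_smoke_test():
    assert MurmurHash3(0x00000000, b'') == 0x00000000
    # Any other (seed, data) pair, e.g. a seed derived from the 0xFBA4C795 multiplier
    # used by CBloomFilter.bloom_hash below, simply yields some other 32-bit value.
    assert 0 <= MurmurHash3(0xFBA4C795, b'hello world') <= 0xFFFFFFFF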
class CBloomFilter(bitcoin.core.serialize.Serializable):
# 20,000 items with fp rate < 0.1% or 10,000 items and <0.0001%
MAX_BLOOM_FILTER_SIZE = 36000
MAX_HASH_FUNCS = 50
UPDATE_NONE = 0
UPDATE_ALL = 1
UPDATE_P2PUBKEY_ONLY = 2
UPDATE_MASK = 3
def __init__(self, nElements, nFPRate, nTweak, nFlags):
"""Create a new bloom filter
The filter will have a given false-positive rate when filled with the
given number of elements.
Note that if the given parameters will result in a filter outside the
bounds of the protocol limits, the filter created will be as close to
the given parameters as possible within the protocol limits. This will
apply if nFPRate is very low or nElements is unreasonably high.
nTweak is a constant which is added to the seed value passed to the
hash function. It should generally always be a random value (and is
largely only exposed for unit testing).
nFlags should be one of the UPDATE_* enums (but not _MASK)
"""
LN2SQUARED = 0.4804530139182014246671025263266649717305529515945455
LN2 = 0.6931471805599453094172321214581765680755001343602552
self.vData = bytearray(int(min(-1 / LN2SQUARED * nElements * math.log(nFPRate), self.MAX_BLOOM_FILTER_SIZE * 8) / 8))
self.nHashFuncs = int(min(len(self.vData) * 8 / nElements * LN2, self.MAX_HASH_FUNCS))
self.nTweak = nTweak
self.nFlags = nFlags
def bloom_hash(self, nHashNum, vDataToHash):
return MurmurHash3(((nHashNum * 0xFBA4C795) + self.nTweak) & 0xFFFFFFFF, vDataToHash) % (len(self.vData) * 8)
__bit_mask = bytearray([0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80])
def insert(self, elem):
"""Insert an element in the filter.
elem may be a COutPoint or bytes
"""
if isinstance(elem, bitcoin.core.COutPoint):
elem = elem.serialize()
if len(self.vData) == 1 and self.vData[0] == 0xff:
return
for i in range(0, self.nHashFuncs):
nIndex = self.bloom_hash(i, elem)
# Sets bit nIndex of vData
self.vData[nIndex >> 3] |= self.__bit_mask[7 & nIndex]
def contains(self, elem):
"""Test if the filter contains an element
elem may be a COutPoint or bytes
"""
if isinstance(elem, bitcoin.core.COutPoint):
elem = elem.serialize()
if len(self.vData) == 1 and self.vData[0] == 0xff:
return True
for i in range(0, self.nHashFuncs):
nIndex = self.bloom_hash(i, elem)
if not (self.vData[nIndex >> 3] & self.__bit_mask[7 & nIndex]):
return False
return True
def IsWithinSizeConstraints(self):
return len(self.vData) <= self.MAX_BLOOM_FILTER_SIZE and self.nHashFuncs <= self.MAX_HASH_FUNCS
def IsRelevantAndUpdate(self, tx, tx_hash):
# Not useful for a client, so not implemented yet.
raise NotImplementedError
__struct = struct.Struct(b'<IIB')
@classmethod
def stream_deserialize(cls, f):
vData = bytearray(bitcoin.core.serialize.BytesSerializer.stream_deserialize(f))
(nHashFuncs,
nTweak,
nFlags) = CBloomFilter.__struct.unpack(bitcoin.core.ser_read(f, CBloomFilter.__struct.size))
# These arguments can be fake, the real values are set just after
deserialized = cls(1, 0.01, 0, CBloomFilter.UPDATE_ALL)
deserialized.vData = vData
deserialized.nHashFuncs = nHashFuncs
deserialized.nTweak = nTweak
deserialized.nFlags = nFlags
return deserialized
def stream_serialize(self, f):
|
__all__ = (
'MurmurHash3',
'CBloomFilter',
)
|
if sys.version > '3':
bitcoin.core.serialize.BytesSerializer.stream_serialize(self.vData, f)
else:
# 2.7 has problems with f.write(bytearray())
bitcoin.core.serialize.BytesSerializer.stream_serialize(bytes(self.vData), f)
f.write(self.__struct.pack(self.nHashFuncs, self.nTweak, self.nFlags))
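# Editor's note: a short usage sketch added by the editor (not part of the original
# module), tying the constructor's sizing formulas to concrete numbers: for nElements=3
# and nFPRate=0.01, -n*ln(p)/ln(2)^2 ~= 28.8 bits -> a 3-byte vData, and
# nHashFuncs = int(min(m/n * ln2, 50)) = 5.
def _bloom_usage_sketch():
    flt = CBloomFilter(nElements=3, nFPRate=0.01, nTweak=0, nFlags=CBloomFilter.UPDATE_ALL)
    flt.insert(b'\x99\x10')
    assert flt.contains(b'\x99\x10')       # inserted data is always found
    assert flt.IsWithinSizeConstraints()   # within MAX_BLOOM_FILTER_SIZE / MAX_HASH_FUNCS
    # Other data is *probably* reported absent, but false positives are allowed by design.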
|
manage.py
|
#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django # noqa
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
|
raise
# This allows easy placement of apps within the interior
# ocr_osler directory.
current_path = Path(__file__).parent.resolve()
sys.path.append(str(current_path / "ocr_osler"))
execute_from_command_line(sys.argv)
| |
product.js
|
import wepy from 'wepy';
import {request} from './index';
import {hasUserInfo} from '../utils';
export const productGetCategory = async (storeID) => {
return await request(
'category',
{
data: {
page: 1,
page_size: 1000,
shop: storeID,
},
},
);
};
export const productGetBanner = async (storeID) => {
return await request(
'banner',
{
data: {
page: 1,
page_size: 1000,
shop: storeID,
},
},
);
};
export const productGetDetail = async (url) => {
let rs = {};
|
rs = await request(
url,
{
loginRequired: hasUserInfo(),
}
);
return rs;
};
export const productGetList = async ({
store,
category = '',
next = '',
isGrouping = '',
productType = '',
keyword = '',
recommendation = '',
pageSize = 20,
}) => {
return await request(
next || 'goods',
{
data: next
? {}
: {
page_size: pageSize,
category,
shop: store,
search: keyword,
groupbuy: isGrouping,
model_type: productType,
recommendation,
},
loginRequired: hasUserInfo(),
},
);
};
export const productGetPoster = async (url) => {
const session = wepy.getStorageSync('session');
return await new Promise(
(resolve) => {
wepy.downloadFile({
url,
header: {
Authorization: `session ${session}`,
},
success: (res) => {
resolve(res);
},
fail: (err) => {
console.error(err);
},
});
},
);
};
export const productGetGroupsInfo = async (product) => {
return await request(
'ptgroup',
{
data: {
page: 1,
page_size: 1000,
ptgoods: product,
},
loginRequired: hasUserInfo(),
},
);
};
|
if (url)
|
node.go
|
package rbac
import (
"encoding/json"
// "github.com/astaxie/beego"
"github.com/astaxie/beego/orm"
m "github.com/hunterhug/AmazonBigSpiderWeb/models/admin"
)
type NodeController struct {
CommonController
}
func (this *NodeController) Rsp(status bool, str string) {
this.Data["json"] = &map[string]interface{}{"status": status, "info": str}
this.ServeJSON()
}
func (this *NodeController) Index() {
if this.IsAjax() {
groupid, _ := this.GetInt64("group_id")
nodes, count := m.GetNodelistByGroupid(groupid)
for i := 0; i < len(nodes); i++ {
nodes[i]["_parentId"] = nodes[i]["Pid"]
}
if len(nodes) < 1 {
nodes = []orm.Params{}
}
// beego.Trace("%v", nodes)
this.Data["json"] = &map[string]interface{}{"total": count, "rows": &nodes}
this.ServeJSON()
return
} else {
grouplist := m.GroupList()
b, _ := json.Marshal(grouplist)
this.Data["grouplist"] = string(b)
this.Layout = this.GetTemplate() + "/public/layout.html"
this.TplName = this.GetTemplate() + "/rbac/node.html"
}
}
func (this *NodeController) AddAndEdit() {
n := m.Node{}
if err := this.ParseForm(&n); err != nil {
//handle error
this.Rsp(false, err.Error())
|
var err error
Nid, _ := this.GetInt64("Id")
if Nid > 0 {
id, err = m.UpdateNode(&n)
} else {
group_id, _ := this.GetInt64("Group_id")
group := new(m.Group)
group.Id = group_id
n.Group = group
if n.Pid != 0 {
n1, _ := m.ReadNode(n.Pid)
n.Level = n1.Level + 1
} else {
n.Level = 1
}
id, err = m.AddNode(&n)
}
if err == nil && id > 0 {
this.Rsp(true, "Success")
return
} else {
this.Rsp(false, err.Error())
return
}
}
func (this *NodeController) DelNode() {
Id, _ := this.GetInt64("Id")
status, err := m.DelNodeById(Id)
if err == nil && status > 0 {
this.Rsp(true, "Success")
return
} else {
this.Rsp(false, err.Error())
return
}
}
|
return
}
var id int64
|
GameList.py
|
import pickle
from py.game_logic.Game import Game
GAMES_FILE = 'db/GAMELIST.pickle'
class GameList(object):
|
def __init__(self):
self.games = [Game()]
def __len__(self):
return len(self.games)
def addGame(self):
self.games.append(Game())
# this turned out to be more trouble than it was worth:
# def pickle(self):
# '''
# Strips out non-pickleable attributes and saves what it can to file.
# This should be an uncommon operation, used only to preserve game-states for next time the
# server comes back up.
# '''
# with open(GAMES_FILE, 'wb') as f:
# pickle.dump(self, f,-1)
# print str(len(self))+' games-in-progress pickled.'
    def unpickle(self):
        try:
            with open(GAMES_FILE, 'rb') as f:
                # rebinding `self` would be a no-op; copy the loaded games instead
                self.games = pickle.load(f).games
        except (EOFError, IOError):
            print 'No pickled games-in-progress found. Starting from scratch.'
def joinGame(self,userObj):
# connects given user object to best game
# if user already in a game, returns that one
# else finds open slot
game = self._inGame(userObj)
if game:
userObj.game = game
return game
else:
game = self.__findOpenSlot(userObj)
game.addPlayer(userObj)
return game
    def _inGame(self, user):
        # returns game obj if user is in a game, else returns None
        for game in self.games:
            if game.inGame(user.name):
                return game
        return None
def __findOpenSlot(self,user):
# returns the best game for a new user to join
# NOTE: just uses 1 game for now...
selectedGame = self.games[0]
return selectedGame
    # DEPRECATED
def findOpenSlot(self,user):
# returns the best open slot for a new user to join a game
# adds the player to the game, and returns that game object
# NOTE: just uses 1 game for now...
selectedGame = self.games[0]
selectedGame.addPlayer(user)
return selectedGame
|
|
pod.go
|
package pod
import (
"github.com/elastic/beats/libbeat/common"
"github.com/elastic/beats/libbeat/logp"
"github.com/elastic/beats/metricbeat/helper"
"github.com/elastic/beats/metricbeat/mb"
"github.com/elastic/beats/metricbeat/mb/parse"
)
const (
defaultScheme = "http"
defaultPath = "/stats/summary"
)
var (
hostParser = parse.URLHostParserBuilder{
DefaultScheme: defaultScheme,
DefaultPath: defaultPath,
}.Build()
)
// init registers the MetricSet with the central registry.
// The New method will be called after the setup of the module and before starting to fetch data
func init() {
if err := mb.Registry.AddMetricSet("kubelet", "pod", New, hostParser); err != nil {
panic(err)
}
}
// MetricSet type defines all fields of the MetricSet
// As a minimum it must inherit the mb.BaseMetricSet fields, but can be extended with
// additional entries. These variables can be used to persist data or configuration between
// multiple fetch calls.
type MetricSet struct {
mb.BaseMetricSet
http *helper.HTTP
}
// New creates a new instance of the MetricSet.
// Part of New is also setting up the configuration by processing additional
// configuration entries if needed.
func New(base mb.BaseMetricSet) (mb.MetricSet, error) {
logp.Beta("The kubelet pod metricset is beta")
return &MetricSet{
BaseMetricSet: base,
http: helper.NewHTTP(base),
}, nil
}
// Fetch implements the data gathering and data conversion to the right format.
// It returns the events, which are then forwarded to the output. In case of an error, a
// descriptive error must be returned.
func (m *MetricSet) Fetch() ([]common.MapStr, error) {
body, err := m.http.FetchContent()
if err != nil {
return nil, err
}
events, err := eventMapping(body)
if err != nil
|
return events, nil
}
|
{
return nil, err
}
|
api.indices.close.go
|
// Code generated from specification version 7.0.0 (5e798c1): DO NOT EDIT
package esapi
import (
"context"
"strconv"
"strings"
"time"
)
func newIndicesCloseFunc(t Transport) IndicesClose {
return func(index []string, o ...func(*IndicesCloseRequest)) (*Response, error) {
var r = IndicesCloseRequest{Index: index}
for _, f := range o {
f(&r)
}
return r.Do(r.ctx, t)
}
}
// ----- API Definition -------------------------------------------------------
// IndicesClose closes an index.
//
// See full documentation at http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-open-close.html.
//
type IndicesClose func(index []string, o ...func(*IndicesCloseRequest)) (*Response, error)
// IndicesCloseRequest configures the Indices Close API request.
//
type IndicesCloseRequest struct {
Index []string
AllowNoIndices *bool
ExpandWildcards string
IgnoreUnavailable *bool
MasterTimeout time.Duration
Timeout time.Duration
Pretty bool
Human bool
ErrorTrace bool
FilterPath []string
ctx context.Context
}
// Do executes the request and returns response or error.
//
func (r IndicesCloseRequest) Do(ctx context.Context, transport Transport) (*Response, error) {
var (
method string
path strings.Builder
params map[string]string
)
method = "POST"
path.Grow(1 + len(strings.Join(r.Index, ",")) + 1 + len("_close"))
path.WriteString("/")
path.WriteString(strings.Join(r.Index, ","))
path.WriteString("/")
path.WriteString("_close")
params = make(map[string]string)
if r.AllowNoIndices != nil {
params["allow_no_indices"] = strconv.FormatBool(*r.AllowNoIndices)
}
if r.ExpandWildcards != "" {
params["expand_wildcards"] = r.ExpandWildcards
}
if r.IgnoreUnavailable != nil {
params["ignore_unavailable"] = strconv.FormatBool(*r.IgnoreUnavailable)
}
if r.MasterTimeout != 0 {
params["master_timeout"] = time.Duration(r.MasterTimeout * time.Millisecond).String()
}
if r.Timeout != 0 {
params["timeout"] = time.Duration(r.Timeout * time.Millisecond).String()
}
if r.Pretty {
params["pretty"] = "true"
}
if r.Human {
params["human"] = "true"
}
if r.ErrorTrace {
params["error_trace"] = "true"
}
if len(r.FilterPath) > 0 {
params["filter_path"] = strings.Join(r.FilterPath, ",")
}
req, _ := newRequest(method, path.String(), nil)
if len(params) > 0 {
q := req.URL.Query()
for k, v := range params {
q.Set(k, v)
}
req.URL.RawQuery = q.Encode()
}
if ctx != nil {
req = req.WithContext(ctx)
}
res, err := transport.Perform(req)
if err != nil {
return nil, err
}
response := Response{
StatusCode: res.StatusCode,
Body: res.Body,
Header: res.Header,
}
return &response, nil
}
// WithContext sets the request context.
//
func (f IndicesClose) WithContext(v context.Context) func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.ctx = v
}
}
// WithAllowNoIndices - whether to ignore if a wildcard indices expression resolves into no concrete indices. (this includes `_all` string or when no indices have been specified).
//
func (f IndicesClose) WithAllowNoIndices(v bool) func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.AllowNoIndices = &v
}
}
// WithExpandWildcards - whether to expand wildcard expression to concrete indices that are open, closed or both..
//
func (f IndicesClose) WithExpandWildcards(v string) func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.ExpandWildcards = v
}
}
// WithIgnoreUnavailable - whether specified concrete indices should be ignored when unavailable (missing or closed).
//
func (f IndicesClose) WithIgnoreUnavailable(v bool) func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.IgnoreUnavailable = &v
}
}
// WithMasterTimeout - specify timeout for connection to master.
//
func (f IndicesClose) WithMasterTimeout(v time.Duration) func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.MasterTimeout = v
}
}
// WithTimeout - explicit operation timeout.
//
func (f IndicesClose) WithTimeout(v time.Duration) func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.Timeout = v
}
}
// WithPretty makes the response body pretty-printed.
//
func (f IndicesClose) WithPretty() func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.Pretty = true
}
}
// WithHuman makes statistical values human-readable.
//
func (f IndicesClose) WithHuman() func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.Human = true
}
|
func (f IndicesClose) WithErrorTrace() func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.ErrorTrace = true
}
}
// WithFilterPath filters the properties of the response body.
//
func (f IndicesClose) WithFilterPath(v ...string) func(*IndicesCloseRequest) {
return func(r *IndicesCloseRequest) {
r.FilterPath = v
}
}
|
}
// WithErrorTrace includes the stack trace for errors in the response body.
//
|
related_0.js
|
var searchData=
[
['anymetadata_0',['AnyMetadata',['../classoperations__research_1_1_m_p_array_with_constant_constraint.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPArrayWithConstantConstraint::AnyMetadata()'],['../classoperations__research_1_1_flow_node_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::FlowNodeProto::AnyMetadata()'],['../classoperations__research_1_1_flow_model_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::FlowModelProto::AnyMetadata()'],['../classoperations__research_1_1_g_scip_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::GScipParameters::AnyMetadata()'],['../classoperations__research_1_1_g_scip_solving_stats.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::GScipSolvingStats::AnyMetadata()'],['../classoperations__research_1_1_g_scip_output.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::GScipOutput::AnyMetadata()'],['../classoperations__research_1_1_m_p_variable_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPVariableProto::AnyMetadata()'],['../classoperations__research_1_1_m_p_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPConstraintProto::AnyMetadata()'],['../classoperations__research_1_1_m_p_general_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPGeneralConstraintProto::AnyMetadata()'],['../classoperations__research_1_1_m_p_indicator_constraint.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPIndicatorConstraint::AnyMetadata()'],['../classoperations__research_1_1_m_p_sos_constraint.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPSosConstraint::AnyMetadata()'],['../classoperations__research_1_1_m_p_quadratic_constraint.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPQuadraticConstraint::AnyMetadata()'],['../classoperations__research_1_1_m_p_abs_constraint.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPAbsConstraint::AnyMetadata()'],['../classoperations__research_1_1_m_p_array_constraint.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPArrayConstraint::AnyMetadata()'],['../classoperations__research_1_1_flow_arc_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::FlowArcProto::AnyMetadata()'],['../classoperations__research_1_1_m_p_quadratic_objective.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPQuadraticObjective::AnyMetadata()'],['../classoperations__research_1_1_partial_variable_assignment.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::PartialVariableAssignment::AnyMetadata()'],['../classoperations__research_1_1_m_p_model_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPModelProto::AnyMetadata()'],['../classoperations__research_1_1_optional_double.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::OptionalDouble::AnyMetadata()'],['../classoperations__research_1_1_m_p_solver_common_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPSolverCommonParameters::AnyMetadata()'],['../classoperations__research_1_1_m_p_model_delta_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPModelDeltaProto::AnyMetadata()'],['../classoperations__research_1_1_m_p_model_request.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPModelRequest::AnyMetadata()'],['../classoperations__research_1_1_m_p_solution.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operation
s_research::MPSolution::AnyMetadata()'],['../classoperations__research_1_1_m_p_solve_info.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPSolveInfo::AnyMetadata()'],['../classoperations__research_1_1_m_p_solution_response.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::MPSolutionResponse::AnyMetadata()'],['../classoperations__research_1_1packing_1_1vbp_1_1_item.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::packing::vbp::Item::AnyMetadata()'],['../classoperations__research_1_1packing_1_1vbp_1_1_vector_bin_packing_problem.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::packing::vbp::VectorBinPackingProblem::AnyMetadata()'],['../classoperations__research_1_1_routing_search_parameters___local_search_neighborhood_operators.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::RoutingSearchParameters_LocalSearchNeighborhoodOperators::AnyMetadata()'],['../classoperations__research_1_1bop_1_1_bop_optimizer_method.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::bop::BopOptimizerMethod::AnyMetadata()'],['../classoperations__research_1_1bop_1_1_bop_solver_optimizer_set.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::bop::BopSolverOptimizerSet::AnyMetadata()'],['../classoperations__research_1_1bop_1_1_bop_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::bop::BopParameters::AnyMetadata()'],['../classoperations__research_1_1_int_var_assignment.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::IntVarAssignment::AnyMetadata()'],['../classoperations__research_1_1_interval_var_assignment.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::IntervalVarAssignment::AnyMetadata()'],['../classoperations__research_1_1_sequence_var_assignment.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::SequenceVarAssignment::AnyMetadata()'],['../classoperations__research_1_1_worker_info.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::WorkerInfo::AnyMetadata()'],['../classoperations__research_1_1_assignment_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::AssignmentProto::AnyMetadata()'],['../classoperations__research_1_1_demon_runs.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::DemonRuns::AnyMetadata()'],['../classoperations__research_1_1_constraint_runs.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::ConstraintRuns::AnyMetadata()'],['../classoperations__research_1_1_first_solution_strategy.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::FirstSolutionStrategy::AnyMetadata()'],['../classoperations__research_1_1_local_search_metaheuristic.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::LocalSearchMetaheuristic::AnyMetadata()'],['../classoperations__research_1_1packing_1_1vbp_1_1_vector_bin_packing_one_bin_in_solution.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::packing::vbp::VectorBinPackingOneBinInSolution::AnyMetadata()'],['../classoperations__research_1_1_routing_search_parameters___improvement_search_limit_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::RoutingSearchParameters_ImprovementSearchLimitParameters::AnyMetadata()'],['../classoperations__research_1_1_routing_search_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::RoutingSearchParameters::AnyMetadata()'],['../classoperations__research_1_1_routing_model_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_resear
ch::RoutingModelParameters::AnyMetadata()'],['../classoperations__research_1_1_regular_limit_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::RegularLimitParameters::AnyMetadata()'],['../classoperations__research_1_1_local_search_statistics___first_solution_statistics.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::LocalSearchStatistics_FirstSolutionStatistics::AnyMetadata()'],['../classoperations__research_1_1_local_search_statistics___local_search_operator_statistics.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::LocalSearchStatistics_LocalSearchOperatorStatistics::AnyMetadata()'],['../classoperations__research_1_1_local_search_statistics___local_search_filter_statistics.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::LocalSearchStatistics_LocalSearchFilterStatistics::AnyMetadata()'],['../classoperations__research_1_1_local_search_statistics.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::LocalSearchStatistics::AnyMetadata()'],['../classoperations__research_1_1_constraint_solver_statistics.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::ConstraintSolverStatistics::AnyMetadata()'],['../classoperations__research_1_1_search_statistics.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::SearchStatistics::AnyMetadata()'],['../classoperations__research_1_1_constraint_solver_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::ConstraintSolverParameters::AnyMetadata()'],['../classoperations__research_1_1glop_1_1_glop_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::glop::GlopParameters::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_transition_time_matrix.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::jssp::TransitionTimeMatrix::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_decision_strategy_proto___affine_transformation.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::DecisionStrategyProto_AffineTransformation::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_decision_strategy_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::DecisionStrategyProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_partial_variable_assignment.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::PartialVariableAssignment::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_sparse_permutation_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::SparsePermutationProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_dense_matrix_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::DenseMatrixProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_symmetry_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::SymmetryProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_cp_model_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::CpModelProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_cp_solver_solution.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::CpSolverSolution::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_cp_solver_response.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::CpSolverResponse::AnyMetadata()'],['../classoperations__research_1_1sat_1_1v1_1_1_cp_solver_request.html#a9b35d94da3444084fc3673b7717b6cfe',1,'
operations_research::sat::v1::CpSolverRequest::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_sat_parameters.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::SatParameters::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_task.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::jssp::Task::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_job.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::jssp::Job::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_float_objective_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::FloatObjectiveProto::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_machine.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::jssp::Machine::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_job_precedence.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::jssp::JobPrecedence::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_jssp_input_problem.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::jssp::JsspInputProblem::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_assigned_task.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::jssp::AssignedTask::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_assigned_job.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::jssp::AssignedJob::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_jssp_output_solution.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::jssp::JsspOutputSolution::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_resource.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::rcpsp::Resource::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_recipe.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::rcpsp::Recipe::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_per_recipe_delays.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::rcpsp::PerRecipeDelays::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_per_successor_delays.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::rcpsp::PerSuccessorDelays::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_task.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::rcpsp::Task::AnyMetadata()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_rcpsp_problem.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::scheduling::rcpsp::RcpspProblem::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_interval_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::IntervalConstraintProto::AnyMetadata()'],['../classoperations__research_1_1packing_1_1vbp_1_1_vector_bin_packing_solution.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::packing::vbp::VectorBinPackingSolution::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_linear_boolean_constraint.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::LinearBooleanConstraint::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_linear_objective.html#a9b35d94da3444
084fc3673b7717b6cfe',1,'operations_research::sat::LinearObjective::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_boolean_assignment.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::BooleanAssignment::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_linear_boolean_problem.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::LinearBooleanProblem::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_integer_variable_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::IntegerVariableProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_bool_argument_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::BoolArgumentProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_linear_expression_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::LinearExpressionProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_linear_argument_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::LinearArgumentProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_all_different_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::AllDifferentConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_linear_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::LinearConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_element_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::ElementConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_no_overlap_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::NoOverlapConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_no_overlap2_d_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::NoOverlap2DConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_cumulative_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::CumulativeConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_reservoir_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::ReservoirConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_circuit_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::CircuitConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_routes_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::RoutesConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_table_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::TableConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_inverse_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::InverseConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_automaton_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::AutomatonConstraintProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_list_of_variables_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::ListOfVariablesProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_constraint_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::Constrai
ntProto::AnyMetadata()'],['../classoperations__research_1_1sat_1_1_cp_objective_proto.html#a9b35d94da3444084fc3673b7717b6cfe',1,'operations_research::sat::CpObjectiveProto::AnyMetadata()']]],
['internalhelper_1',['InternalHelper',['../classoperations__research_1_1bop_1_1_bop_solver_optimizer_set.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::bop::BopSolverOptimizerSet::InternalHelper()'],['../classoperations__research_1_1_m_p_array_with_constant_constraint.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPArrayWithConstantConstraint::InternalHelper()'],['../classoperations__research_1_1_flow_node_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::FlowNodeProto::InternalHelper()'],['../classoperations__research_1_1_flow_model_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::FlowModelProto::InternalHelper()'],['../classoperations__research_1_1_g_scip_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::GScipParameters::InternalHelper()'],['../classoperations__research_1_1_g_scip_solving_stats.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::GScipSolvingStats::InternalHelper()'],['../classoperations__research_1_1_g_scip_output.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::GScipOutput::InternalHelper()'],['../classoperations__research_1_1_m_p_variable_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPVariableProto::InternalHelper()'],['../classoperations__research_1_1_m_p_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPConstraintProto::InternalHelper()'],['../classoperations__research_1_1_m_p_general_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPGeneralConstraintProto::InternalHelper()'],['../classoperations__research_1_1_m_p_indicator_constraint.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPIndicatorConstraint::InternalHelper()'],['../classoperations__research_1_1_m_p_sos_constraint.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPSosConstraint::InternalHelper()'],['../classoperations__research_1_1_m_p_quadratic_constraint.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPQuadraticConstraint::InternalHelper()'],['../classoperations__research_1_1_m_p_abs_constraint.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPAbsConstraint::InternalHelper()'],['../classoperations__research_1_1_m_p_array_constraint.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPArrayConstraint::InternalHelper()'],['../classoperations__research_1_1_flow_arc_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::FlowArcProto::InternalHelper()'],['../classoperations__research_1_1_m_p_quadratic_objective.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPQuadraticObjective::InternalHelper()'],['../classoperations__research_1_1_partial_variable_assignment.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::PartialVariableAssignment::InternalHelper()'],['../classoperations__research_1_1_m_p_model_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPModelProto::InternalHelper()'],['../classoperations__research_1_1_optional_double.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::OptionalDouble::InternalHelper()'],['../classoperations__research_1_1_m_p_solver_common_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPSolverCommonParameters::InternalHelper()'],['../classoperations__research_1_1_m_p_model_delta_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPModelDeltaProto::InternalHelper()'],['
../classoperations__research_1_1_m_p_model_request.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPModelRequest::InternalHelper()'],['../classoperations__research_1_1_m_p_solution.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPSolution::InternalHelper()'],['../classoperations__research_1_1_m_p_solve_info.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPSolveInfo::InternalHelper()'],['../classoperations__research_1_1_m_p_solution_response.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::MPSolutionResponse::InternalHelper()'],['../classoperations__research_1_1packing_1_1vbp_1_1_item.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::packing::vbp::Item::InternalHelper()'],['../classoperations__research_1_1packing_1_1vbp_1_1_vector_bin_packing_problem.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::packing::vbp::VectorBinPackingProblem::InternalHelper()'],['../classoperations__research_1_1_routing_search_parameters___local_search_neighborhood_operators.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::RoutingSearchParameters_LocalSearchNeighborhoodOperators::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_rcpsp_problem.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::rcpsp::RcpspProblem::InternalHelper()'],['../classoperations__research_1_1bop_1_1_bop_optimizer_method.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::bop::BopOptimizerMethod::InternalHelper()'],['../classoperations__research_1_1bop_1_1_bop_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::bop::BopParameters::InternalHelper()'],['../classoperations__research_1_1_int_var_assignment.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::IntVarAssignment::InternalHelper()'],['../classoperations__research_1_1_interval_var_assignment.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::IntervalVarAssignment::InternalHelper()'],['../classoperations__research_1_1_sequence_var_assignment.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::SequenceVarAssignment::InternalHelper()'],['../classoperations__research_1_1_worker_info.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::WorkerInfo::InternalHelper()'],['../classoperations__research_1_1_assignment_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::AssignmentProto::InternalHelper()'],['../classoperations__research_1_1_demon_runs.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::DemonRuns::InternalHelper()'],['../classoperations__research_1_1_constraint_runs.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::ConstraintRuns::InternalHelper()'],['../classoperations__research_1_1_first_solution_strategy.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::FirstSolutionStrategy::InternalHelper()'],['../classoperations__research_1_1_local_search_metaheuristic.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::LocalSearchMetaheuristic::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_task.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::rcpsp::Task::InternalHelper()'],['../classoperations__research_1_1_routing_search_parameters___improvement_search_limit_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::RoutingSearchParameters_ImprovementSearchLimitParameters::InternalHelper()'],['../classoperatio
ns__research_1_1_routing_search_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::RoutingSearchParameters::InternalHelper()'],['../classoperations__research_1_1_routing_model_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::RoutingModelParameters::InternalHelper()'],['../classoperations__research_1_1_regular_limit_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::RegularLimitParameters::InternalHelper()'],['../classoperations__research_1_1_local_search_statistics___first_solution_statistics.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::LocalSearchStatistics_FirstSolutionStatistics::InternalHelper()'],['../classoperations__research_1_1_local_search_statistics___local_search_operator_statistics.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::LocalSearchStatistics_LocalSearchOperatorStatistics::InternalHelper()'],['../classoperations__research_1_1_local_search_statistics___local_search_filter_statistics.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::LocalSearchStatistics_LocalSearchFilterStatistics::InternalHelper()'],['../classoperations__research_1_1_local_search_statistics.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::LocalSearchStatistics::InternalHelper()'],['../classoperations__research_1_1_constraint_solver_statistics.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::ConstraintSolverStatistics::InternalHelper()'],['../classoperations__research_1_1_search_statistics.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::SearchStatistics::InternalHelper()'],['../classoperations__research_1_1_constraint_solver_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::ConstraintSolverParameters::InternalHelper()'],['../classoperations__research_1_1glop_1_1_glop_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::glop::GlopParameters::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_task.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::jssp::Task::InternalHelper()'],['../classoperations__research_1_1sat_1_1_float_objective_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::FloatObjectiveProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_decision_strategy_proto___affine_transformation.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::DecisionStrategyProto_AffineTransformation::InternalHelper()'],['../classoperations__research_1_1sat_1_1_decision_strategy_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::DecisionStrategyProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_partial_variable_assignment.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::PartialVariableAssignment::InternalHelper()'],['../classoperations__research_1_1sat_1_1_sparse_permutation_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::SparsePermutationProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_dense_matrix_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::DenseMatrixProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_symmetry_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::SymmetryProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_cp_model_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::CpMode
lProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_cp_solver_solution.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::CpSolverSolution::InternalHelper()'],['../classoperations__research_1_1sat_1_1_cp_solver_response.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::CpSolverResponse::InternalHelper()'],['../classoperations__research_1_1sat_1_1v1_1_1_cp_solver_request.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::v1::CpSolverRequest::InternalHelper()'],['../classoperations__research_1_1sat_1_1_sat_parameters.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::SatParameters::InternalHelper()'],['../classoperations__research_1_1packing_1_1vbp_1_1_vector_bin_packing_solution.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::packing::vbp::VectorBinPackingSolution::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_job.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::jssp::Job::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_transition_time_matrix.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::jssp::TransitionTimeMatrix::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_machine.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::jssp::Machine::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_job_precedence.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::jssp::JobPrecedence::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_jssp_input_problem.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::jssp::JsspInputProblem::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_assigned_task.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::jssp::AssignedTask::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_assigned_job.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::jssp::AssignedJob::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_jssp_output_solution.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::jssp::JsspOutputSolution::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_resource.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::rcpsp::Resource::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_recipe.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::rcpsp::Recipe::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_per_recipe_delays.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::rcpsp::PerRecipeDelays::InternalHelper()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_per_successor_delays.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::scheduling::rcpsp::PerSuccessorDelays::InternalHelper()'],['../classoperations__research_1_1sat_1_1_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::ConstraintProto::InternalHelper()'],['../classoperations__research_1_1packing_1_1vbp_1_1_vector_bin_packing_one_bin_in_solution.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::packing::vbp::VectorBinPackingOneBinInSolution::InternalHelper()'],['../classoperations_
_research_1_1sat_1_1_linear_boolean_constraint.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::LinearBooleanConstraint::InternalHelper()'],['../classoperations__research_1_1sat_1_1_linear_objective.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::LinearObjective::InternalHelper()'],['../classoperations__research_1_1sat_1_1_boolean_assignment.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::BooleanAssignment::InternalHelper()'],['../classoperations__research_1_1sat_1_1_linear_boolean_problem.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::LinearBooleanProblem::InternalHelper()'],['../classoperations__research_1_1sat_1_1_integer_variable_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::IntegerVariableProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_bool_argument_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::BoolArgumentProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_linear_expression_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::LinearExpressionProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_linear_argument_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::LinearArgumentProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_all_different_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::AllDifferentConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_linear_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::LinearConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_element_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::ElementConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_interval_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::IntervalConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_no_overlap_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::NoOverlapConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_no_overlap2_d_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::NoOverlap2DConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_cumulative_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::CumulativeConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_reservoir_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::ReservoirConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_circuit_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::CircuitConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_routes_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::RoutesConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_table_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::TableConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_inverse_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::InverseConstraintProto::InternalHelper()'],['../classoperat
ions__research_1_1sat_1_1_automaton_constraint_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::AutomatonConstraintProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_list_of_variables_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::ListOfVariablesProto::InternalHelper()'],['../classoperations__research_1_1sat_1_1_cp_objective_proto.html#ab618dbbac4a8d749da0d85c32932df36',1,'operations_research::sat::CpObjectiveProto::InternalHelper()']]],
['tablestruct_5fortools_5f2fbop_5f2fbop_5f5fparameters_5f2eproto_2',['TableStruct_ortools_2fbop_2fbop_5fparameters_2eproto',['../classoperations__research_1_1bop_1_1_bop_optimizer_method.html#a92bb5bd069485c6bf8b11d8722ea3507',1,'operations_research::bop::BopOptimizerMethod::TableStruct_ortools_2fbop_2fbop_5fparameters_2eproto()'],['../classoperations__research_1_1bop_1_1_bop_parameters.html#a92bb5bd069485c6bf8b11d8722ea3507',1,'operations_research::bop::BopParameters::TableStruct_ortools_2fbop_2fbop_5fparameters_2eproto()'],['../classoperations__research_1_1bop_1_1_bop_solver_optimizer_set.html#a92bb5bd069485c6bf8b11d8722ea3507',1,'operations_research::bop::BopSolverOptimizerSet::TableStruct_ortools_2fbop_2fbop_5fparameters_2eproto()']]],
['tablestruct_5fortools_5f2fconstraint_5f5fsolver_5f2fassignment_5f2eproto_3',['TableStruct_ortools_2fconstraint_5fsolver_2fassignment_2eproto',['../classoperations__research_1_1_int_var_assignment.html#a14020474561cb273cbcf523fb07cd2f6',1,'operations_research::IntVarAssignment::TableStruct_ortools_2fconstraint_5fsolver_2fassignment_2eproto()'],['../classoperations__research_1_1_assignment_proto.html#a14020474561cb273cbcf523fb07cd2f6',1,'operations_research::AssignmentProto::TableStruct_ortools_2fconstraint_5fsolver_2fassignment_2eproto()'],['../classoperations__research_1_1_worker_info.html#a14020474561cb273cbcf523fb07cd2f6',1,'operations_research::WorkerInfo::TableStruct_ortools_2fconstraint_5fsolver_2fassignment_2eproto()'],['../classoperations__research_1_1_interval_var_assignment.html#a14020474561cb273cbcf523fb07cd2f6',1,'operations_research::IntervalVarAssignment::TableStruct_ortools_2fconstraint_5fsolver_2fassignment_2eproto()'],['../classoperations__research_1_1_sequence_var_assignment.html#a14020474561cb273cbcf523fb07cd2f6',1,'operations_research::SequenceVarAssignment::TableStruct_ortools_2fconstraint_5fsolver_2fassignment_2eproto()']]],
['tablestruct_5fortools_5f2fconstraint_5f5fsolver_5f2fdemon_5f5fprofiler_5f2eproto_4',['TableStruct_ortools_2fconstraint_5fsolver_2fdemon_5fprofiler_2eproto',['../classoperations__research_1_1_demon_runs.html#a7027731d4aebf5bbbad9fcb0292e7f5c',1,'operations_research::DemonRuns::TableStruct_ortools_2fconstraint_5fsolver_2fdemon_5fprofiler_2eproto()'],['../classoperations__research_1_1_constraint_runs.html#a7027731d4aebf5bbbad9fcb0292e7f5c',1,'operations_research::ConstraintRuns::TableStruct_ortools_2fconstraint_5fsolver_2fdemon_5fprofiler_2eproto()']]],
['tablestruct_5fortools_5f2fconstraint_5f5fsolver_5f2frouting_5f5fenums_5f2eproto_5',['TableStruct_ortools_2fconstraint_5fsolver_2frouting_5fenums_2eproto',['../classoperations__research_1_1_first_solution_strategy.html#ada67e3eb964f85aa7e57c01e29ea271a',1,'operations_research::FirstSolutionStrategy::TableStruct_ortools_2fconstraint_5fsolver_2frouting_5fenums_2eproto()'],['../classoperations__research_1_1_local_search_metaheuristic.html#ada67e3eb964f85aa7e57c01e29ea271a',1,'operations_research::LocalSearchMetaheuristic::TableStruct_ortools_2fconstraint_5fsolver_2frouting_5fenums_2eproto()']]],
['tablestruct_5fortools_5f2fconstraint_5f5fsolver_5f2frouting_5f5fparameters_5f2eproto_6',['TableStruct_ortools_2fconstraint_5fsolver_2frouting_5fparameters_2eproto',['../classoperations__research_1_1_routing_search_parameters___local_search_neighborhood_operators.html#a3725cba34a2c4cf2f5bedae614b8f396',1,'operations_research::RoutingSearchParameters_LocalSearchNeighborhoodOperators::TableStruct_ortools_2fconstraint_5fsolver_2frouting_5fparameters_2eproto()'],['../classoperations__research_1_1_routing_search_parameters___improvement_search_limit_parameters.html#a3725cba34a2c4cf2f5bedae614b8f396',1,'operations_research::RoutingSearchParameters_ImprovementSearchLimitParameters::TableStruct_ortools_2fconstraint_5fsolver_2frouting_5fparameters_2eproto()'],['../classoperations__research_1_1_routing_search_parameters.html#a3725cba34a2c4cf2f5bedae614b8f396',1,'operations_research::RoutingSearchParameters::TableStruct_ortools_2fconstraint_5fsolver_2frouting_5fparameters_2eproto()'],['../classoperations__research_1_1_routing_model_parameters.html#a3725cba34a2c4cf2f5bedae614b8f396',1,'operations_research::RoutingModelParameters::TableStruct_ortools_2fconstraint_5fsolver_2frouting_5fparameters_2eproto()']]],
['tablestruct_5fortools_5f2fconstraint_5f5fsolver_5f2fsearch_5f5flimit_5f2eproto_7',['TableStruct_ortools_2fconstraint_5fsolver_2fsearch_5flimit_2eproto',['../classoperations__research_1_1_regular_limit_parameters.html#a0a43070e014de5e17f863498060c0dea',1,'operations_research::RegularLimitParameters']]],
['tablestruct_5fortools_5f2fconstraint_5f5fsolver_5f2fsearch_5f5fstats_5f2eproto_8',['TableStruct_ortools_2fconstraint_5fsolver_2fsearch_5fstats_2eproto',['../classoperations__research_1_1_local_search_statistics___first_solution_statistics.html#ad51e867be311d0d31072b0b2cc258532',1,'operations_research::LocalSearchStatistics_FirstSolutionStatistics::TableStruct_ortools_2fconstraint_5fsolver_2fsearch_5fstats_2eproto()'],['../classoperations__research_1_1_local_search_statistics___local_search_operator_statistics.html#ad51e867be311d0d31072b0b2cc258532',1,'operations_research::LocalSearchStatistics_LocalSearchOperatorStatistics::TableStruct_ortools_2fconstraint_5fsolver_2fsearch_5fstats_2eproto()'],['../classoperations__research_1_1_local_search_statistics___local_search_filter_statistics.html#ad51e867be311d0d31072b0b2cc258532',1,'operations_research::LocalSearchStatistics_LocalSearchFilterStatistics::TableStruct_ortools_2fconstraint_5fsolver_2fsearch_5fstats_2eproto()'],['../classoperations__research_1_1_local_search_statistics.html#ad51e867be311d0d31072b0b2cc258532',1,'operations_research::LocalSearchStatistics::TableStruct_ortools_2fconstraint_5fsolver_2fsearch_5fstats_2eproto()'],['../classoperations__research_1_1_constraint_solver_statistics.html#ad51e867be311d0d31072b0b2cc258532',1,'operations_research::ConstraintSolverStatistics::TableStruct_ortools_2fconstraint_5fsolver_2fsearch_5fstats_2eproto()'],['../classoperations__research_1_1_search_statistics.html#ad51e867be311d0d31072b0b2cc258532',1,'operations_research::SearchStatistics::TableStruct_ortools_2fconstraint_5fsolver_2fsearch_5fstats_2eproto()']]],
['tablestruct_5fortools_5f2fconstraint_5f5fsolver_5f2fsolver_5f5fparameters_5f2eproto_9',['TableStruct_ortools_2fconstraint_5fsolver_2fsolver_5fparameters_2eproto',['../classoperations__research_1_1_constraint_solver_parameters.html#aa3d80eae38e6975356e594f16d4deb38',1,'operations_research::ConstraintSolverParameters']]],
['tablestruct_5fortools_5f2fglop_5f2fparameters_5f2eproto_10',['TableStruct_ortools_2fglop_2fparameters_2eproto',['../classoperations__research_1_1glop_1_1_glop_parameters.html#a3c578e3e34838195b423077164ab1bde',1,'operations_research::glop::GlopParameters']]],
['tablestruct_5fortools_5f2fgraph_5f2fflow_5f5fproblem_5f2eproto_11',['TableStruct_ortools_2fgraph_2fflow_5fproblem_2eproto',['../classoperations__research_1_1_flow_model_proto.html#a2d7fd8369ce911e1514d514079ebed45',1,'operations_research::FlowModelProto::TableStruct_ortools_2fgraph_2fflow_5fproblem_2eproto()'],['../classoperations__research_1_1_flow_node_proto.html#a2d7fd8369ce911e1514d514079ebed45',1,'operations_research::FlowNodeProto::TableStruct_ortools_2fgraph_2fflow_5fproblem_2eproto()'],['../classoperations__research_1_1_flow_arc_proto.html#a2d7fd8369ce911e1514d514079ebed45',1,'operations_research::FlowArcProto::TableStruct_ortools_2fgraph_2fflow_5fproblem_2eproto()']]],
['tablestruct_5fortools_5f2fgscip_5f2fgscip_5f2eproto_12',['TableStruct_ortools_2fgscip_2fgscip_2eproto',['../classoperations__research_1_1_g_scip_solving_stats.html#a8d1c945470a9411a0ec0c3c2a270fa0e',1,'operations_research::GScipSolvingStats::TableStruct_ortools_2fgscip_2fgscip_2eproto()'],['../classoperations__research_1_1_g_scip_output.html#a8d1c945470a9411a0ec0c3c2a270fa0e',1,'operations_research::GScipOutput::TableStruct_ortools_2fgscip_2fgscip_2eproto()'],['../classoperations__research_1_1_g_scip_parameters.html#a8d1c945470a9411a0ec0c3c2a270fa0e',1,'operations_research::GScipParameters::TableStruct_ortools_2fgscip_2fgscip_2eproto()']]],
['tablestruct_5fortools_5f2flinear_5f5fsolver_5f2flinear_5f5fsolver_5f2eproto_13',['TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto',['../classoperations__research_1_1_m_p_array_with_constant_constraint.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPArrayWithConstantConstraint::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_solution_response.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPSolutionResponse::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_solve_info.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPSolveInfo::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_solution.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPSolution::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_model_request.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPModelRequest::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_model_delta_proto.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPModelDeltaProto::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_solver_common_parameters.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPSolverCommonParameters::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_optional_double.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::OptionalDouble::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_model_proto.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPModelProto::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_partial_variable_assignment.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::PartialVariableAssignment::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_quadratic_objective.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPQuadraticObjective::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_array_constraint.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPArrayConstraint::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_abs_constraint.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPAbsConstraint::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_quadratic_constraint.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPQuadraticConstraint::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_sos_constraint.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPSosConstraint::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_indicator_constraint.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPIndicatorConstraint::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_general_constraint_proto.html#af9
10aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPGeneralConstraintProto::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_constraint_proto.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPConstraintProto::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()'],['../classoperations__research_1_1_m_p_variable_proto.html#af910aca53a97ebc29b0a0b528eeb1671',1,'operations_research::MPVariableProto::TableStruct_ortools_2flinear_5fsolver_2flinear_5fsolver_2eproto()']]],
['tablestruct_5fortools_5f2fpacking_5f2fvector_5f5fbin_5f5fpacking_5f2eproto_14',['TableStruct_ortools_2fpacking_2fvector_5fbin_5fpacking_2eproto',['../classoperations__research_1_1packing_1_1vbp_1_1_vector_bin_packing_one_bin_in_solution.html#a62ff1b455b0d100c196a0c3821fb489d',1,'operations_research::packing::vbp::VectorBinPackingOneBinInSolution::TableStruct_ortools_2fpacking_2fvector_5fbin_5fpacking_2eproto()'],['../classoperations__research_1_1packing_1_1vbp_1_1_vector_bin_packing_solution.html#a62ff1b455b0d100c196a0c3821fb489d',1,'operations_research::packing::vbp::VectorBinPackingSolution::TableStruct_ortools_2fpacking_2fvector_5fbin_5fpacking_2eproto()'],['../classoperations__research_1_1packing_1_1vbp_1_1_item.html#a62ff1b455b0d100c196a0c3821fb489d',1,'operations_research::packing::vbp::Item::TableStruct_ortools_2fpacking_2fvector_5fbin_5fpacking_2eproto()'],['../classoperations__research_1_1packing_1_1vbp_1_1_vector_bin_packing_problem.html#a62ff1b455b0d100c196a0c3821fb489d',1,'operations_research::packing::vbp::VectorBinPackingProblem::TableStruct_ortools_2fpacking_2fvector_5fbin_5fpacking_2eproto()']]],
|
['tablestruct_5fortools_5f2fsat_5f2fcp_5f5fmodel_5f5fservice_5f2eproto_17',['TableStruct_ortools_2fsat_2fcp_5fmodel_5fservice_2eproto',['../classoperations__research_1_1sat_1_1v1_1_1_cp_solver_request.html#a3203d43fb8062a20e2495918a06949f7',1,'operations_research::sat::v1::CpSolverRequest']]],
['tablestruct_5fortools_5f2fsat_5f2fsat_5f5fparameters_5f2eproto_18',['TableStruct_ortools_2fsat_2fsat_5fparameters_2eproto',['../classoperations__research_1_1sat_1_1_sat_parameters.html#a1f3d169deac565ec8fc486cd2aaf6270',1,'operations_research::sat::SatParameters']]],
['tablestruct_5fortools_5f2fscheduling_5f2fjobshop_5f5fscheduling_5f2eproto_19',['TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto',['../classoperations__research_1_1scheduling_1_1jssp_1_1_task.html#a02f1f6ccaf1360c2406408a8fa9b4f6e',1,'operations_research::scheduling::jssp::Task::TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_job.html#a02f1f6ccaf1360c2406408a8fa9b4f6e',1,'operations_research::scheduling::jssp::Job::TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_transition_time_matrix.html#a02f1f6ccaf1360c2406408a8fa9b4f6e',1,'operations_research::scheduling::jssp::TransitionTimeMatrix::TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_machine.html#a02f1f6ccaf1360c2406408a8fa9b4f6e',1,'operations_research::scheduling::jssp::Machine::TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_job_precedence.html#a02f1f6ccaf1360c2406408a8fa9b4f6e',1,'operations_research::scheduling::jssp::JobPrecedence::TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_jssp_input_problem.html#a02f1f6ccaf1360c2406408a8fa9b4f6e',1,'operations_research::scheduling::jssp::JsspInputProblem::TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_assigned_task.html#a02f1f6ccaf1360c2406408a8fa9b4f6e',1,'operations_research::scheduling::jssp::AssignedTask::TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_assigned_job.html#a02f1f6ccaf1360c2406408a8fa9b4f6e',1,'operations_research::scheduling::jssp::AssignedJob::TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto()'],['../classoperations__research_1_1scheduling_1_1jssp_1_1_jssp_output_solution.html#a02f1f6ccaf1360c2406408a8fa9b4f6e',1,'operations_research::scheduling::jssp::JsspOutputSolution::TableStruct_ortools_2fscheduling_2fjobshop_5fscheduling_2eproto()']]],
['tablestruct_5fortools_5f2fscheduling_5f2frcpsp_5f2eproto_20',['TableStruct_ortools_2fscheduling_2frcpsp_2eproto',['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_resource.html#a4955e7891042c0b9f8b795259eb320a6',1,'operations_research::scheduling::rcpsp::Resource::TableStruct_ortools_2fscheduling_2frcpsp_2eproto()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_recipe.html#a4955e7891042c0b9f8b795259eb320a6',1,'operations_research::scheduling::rcpsp::Recipe::TableStruct_ortools_2fscheduling_2frcpsp_2eproto()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_per_recipe_delays.html#a4955e7891042c0b9f8b795259eb320a6',1,'operations_research::scheduling::rcpsp::PerRecipeDelays::TableStruct_ortools_2fscheduling_2frcpsp_2eproto()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_per_successor_delays.html#a4955e7891042c0b9f8b795259eb320a6',1,'operations_research::scheduling::rcpsp::PerSuccessorDelays::TableStruct_ortools_2fscheduling_2frcpsp_2eproto()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_task.html#a4955e7891042c0b9f8b795259eb320a6',1,'operations_research::scheduling::rcpsp::Task::TableStruct_ortools_2fscheduling_2frcpsp_2eproto()'],['../classoperations__research_1_1scheduling_1_1rcpsp_1_1_rcpsp_problem.html#a4955e7891042c0b9f8b795259eb320a6',1,'operations_research::scheduling::rcpsp::RcpspProblem::TableStruct_ortools_2fscheduling_2frcpsp_2eproto()']]]
];
|
['tablestruct_5fortools_5f2fsat_5f2fboolean_5f5fproblem_5f2eproto_15',['TableStruct_ortools_2fsat_2fboolean_5fproblem_2eproto',['../classoperations__research_1_1sat_1_1_linear_boolean_constraint.html#a83fcfcd1ea873d513cd82ef322a32e7e',1,'operations_research::sat::LinearBooleanConstraint::TableStruct_ortools_2fsat_2fboolean_5fproblem_2eproto()'],['../classoperations__research_1_1sat_1_1_linear_objective.html#a83fcfcd1ea873d513cd82ef322a32e7e',1,'operations_research::sat::LinearObjective::TableStruct_ortools_2fsat_2fboolean_5fproblem_2eproto()'],['../classoperations__research_1_1sat_1_1_boolean_assignment.html#a83fcfcd1ea873d513cd82ef322a32e7e',1,'operations_research::sat::BooleanAssignment::TableStruct_ortools_2fsat_2fboolean_5fproblem_2eproto()'],['../classoperations__research_1_1sat_1_1_linear_boolean_problem.html#a83fcfcd1ea873d513cd82ef322a32e7e',1,'operations_research::sat::LinearBooleanProblem::TableStruct_ortools_2fsat_2fboolean_5fproblem_2eproto()']]],
['tablestruct_5fortools_5f2fsat_5f2fcp_5f5fmodel_5f2eproto_16',['TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto',['../classoperations__research_1_1sat_1_1_inverse_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::InverseConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_float_objective_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::FloatObjectiveProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_automaton_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::AutomatonConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_list_of_variables_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::ListOfVariablesProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::ConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_cp_objective_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::CpObjectiveProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_decision_strategy_proto___affine_transformation.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::DecisionStrategyProto_AffineTransformation::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_decision_strategy_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::DecisionStrategyProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_partial_variable_assignment.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::PartialVariableAssignment::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_sparse_permutation_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::SparsePermutationProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_dense_matrix_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::DenseMatrixProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_symmetry_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::SymmetryProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_cp_model_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::CpModelProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_cp_solver_solution.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::CpSolverSolution::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_cp_solver_response.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::CpSolverResponse::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_element_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::ElementConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_table_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_researc
h::sat::TableConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_integer_variable_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::IntegerVariableProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_bool_argument_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::BoolArgumentProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_linear_expression_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::LinearExpressionProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_linear_argument_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::LinearArgumentProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_all_different_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::AllDifferentConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_linear_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::LinearConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_interval_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::IntervalConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_no_overlap_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::NoOverlapConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_no_overlap2_d_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::NoOverlap2DConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_cumulative_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::CumulativeConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_reservoir_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::ReservoirConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_circuit_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::CircuitConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()'],['../classoperations__research_1_1sat_1_1_routes_constraint_proto.html#afb8396aa773b2cf0b644f1ddf0f6f75f',1,'operations_research::sat::RoutesConstraintProto::TableStruct_ortools_2fsat_2fcp_5fmodel_2eproto()']]],
|
index.js
|
let buttons = ["d", "f"];
let leds = [0,0];
document.onkeydown = function(e) {
if (e.key == buttons[0]) {
document.getElementById("left-btn").className = "keydown";
}
if (e.key == buttons[1]) {
document.getElementById("right-btn").className = "keydown";
}
}
document.onkeyup = function(e) {
if (e.key == buttons[0]) {
document.getElementById("left-btn").className = "keyup";
}
if (e.key == buttons[1]) {
document.getElementById("right-btn").className = "keyup";
}
}
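// Each entry in settingsList describes one on-screen control:
//   window            - whether clicking it should open a modal window
//   title / content   - window title and the HTML injected into its body (only used when window is true)
//   onclose / onapply - async callbacks run when the window is closed or the Apply button is pressed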
let settingsList = {
"left-btn": {
"window" : true,
"title" : "Keybinding",
"content" : `
<div style="display: flex; justify-content: center; align-items: center; height: 70%; flex-direction: column;">
<p>Click to change</p>
<input class="binding" type="text" style="width: 70px; height: 70px;" id="key-input" maxlength=1 onkeypress="return (event.charCode >= 65 && event.charCode <= 90) || (event.charCode >= 97 && event.charCode <= 122) || (event.charCode >= 48 && event.charCode <= 57)" autocomplete="off"></input>
</div>
`,
"onclose" : async ()=>{
},
"onapply" : async ()=>{
let key = document.getElementById("key-input").value;
if(key.length > 1 || key.length < 1){
setTimeout(() => {
createWindow("Error", `
<div style="display: flex; justify-content: center; align-items: center; height: 100%; flex-direction: column;">
<p>Please enter a single character</p>
</div>
`);
}, 300);
return;
}
key = key.toLowerCase();
let command = " setkey-left " + key;
await send(command);
let data = await receive();
console.log(data)
if (data == "1"){
buttons[0] = key;
document.getElementById("left-btn").innerHTML = `<p style="margin: 0; padding: 0;">${key.toUpperCase()}</p>`;
}
}
},
"right-btn": {
"window" : true,
"title" : "Keybinding",
"content" : `
<div style="display: flex; justify-content: center; align-items: center; height: 70%; flex-direction: column;">
<p>Click to change</p>
<input class="binding" type="text" style="width: 70px; height: 70px;" id="key-input" maxlength=1 onkeypress="return (event.charCode >= 65 && event.charCode <= 90) || (event.charCode >= 97 && event.charCode <= 122) || (event.charCode >= 48 && event.charCode <= 57)" autocomplete="off"></input>
</div>
`,
"onapply" : async ()=>{
let key = document.getElementById("key-input").value;
if(key.length > 1 || key.length < 1){
setTimeout(() => {
createWindow("Error", "Please enter a single character", ()=>{}, ()=>{});
}, 300);
return;
}
key = key.toLowerCase();
let command = " setkey-right " + key;
await send(command);
let data = await receive();
console.log(data)
if (data == "1"){
buttons[1] = key;
document.getElementById("right-btn").innerHTML = `<p style="margin: 0; padding: 0;">${key.toUpperCase()}</p>`;
}
}
},
"left-led": {
"window" : false,
"onapply" : async ()=>{
if (leds[0] == 0){
let command = " setled-left " + 1;
await send(command);
let data = await receive();
if(data[0] == "1"){
leds[0] = 1;
}
document.getElementById("left-led").className = "led ledon";
}else{
let command = " setled-left " + 0;
await send(command);
let data = await receive();
if(data[0] == "1"){
leds[0] = 0;
}
document.getElementById("left-led").className = "led ledoff";
}
}
},
"right-led": {
"window" : false,
"onapply" : async ()=>{
console.log(leds[0]);
if (leds[1] == 0){
let command = " setled-right " + 1;
await send(command);
console.log("sent");
let data = await receive();
console.log(data);
if(data[0] == "1"){
leds[1] = 1;
|
document.getElementById("right-led").className = "led ledon";
}else{
let command = " setled-right " + 0;
await send(command);
console.log("sent");
let data = await receive();
console.log(data);
if(data[0] == "1"){
leds[1] = 0;
}
document.getElementById("right-led").className = "led ledoff";
}
}
}
}
function setting(key){
let settings = settingsList[key];
if(settings.window)
createWindow(settings.title, settings.content, settings.onclose, settings.onapply);
else{
settings.onapply();
}
}
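// createWindow(title, init, onclose, onapply) injects a modal built from a template string,
// wires up the close/apply buttons and fades the window out before removing it from the DOM.
// The random id keeps several windows created in a row from clashing with each other.
// Passing a falsy onapply hides the Apply button entirely.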
function createWindow(title, init, onclose, onapply){
let randomID = Math.round(Math.random()*1000);
let applybtn = '<p id="apply-btn" class="btn" style="padding: 10px; width: 70px;">Apply</p>';
if(!onapply) applybtn = "";
let code = `
<div class="window" id="bg${randomID}">
<div id="window">
<div style="display: flex; height: 50px; max-height: 50px;">
<h2 style="text-align: center; font-weight: 400; width:450px; background-color: rgb(22, 22, 22); margin: 0; padding-top: 10px;">${title}</h2>
<p id="close-btn" style="margin: 0; width: 50px; height: 40px; text-align: center; padding-top: 10px; font-size: 24px;">X</p>
</div>
<div id="content" style="height: 100%">
${init}
<div style="display: flex; justify-content: center; align-items: center;">
${applybtn}
</div>
</div>
</div>
</div>`;
document.body.innerHTML = code + document.body.innerHTML;
let bg = document.getElementById("bg"+randomID);
bg.classList.add("fadein");
if(onapply){
document.getElementById("apply-btn").onclick = function(){
bg.classList.remove("fadein");
bg.classList.add("fadeout");
if (typeof onapply === "function")
onapply();
setTimeout(()=>{
bg.remove();
}, 200);
}
}
document.getElementById("close-btn").onclick = function(){
bg.classList.remove("fadein");
bg.classList.add("fadeout");
if (typeof onclose === "function")
onclose();
setTimeout(()=>{
bg.remove();
}, 200);
}
}
/*
document.getElementById("debug-btn").onclick = function(){
let console = document.getElementById("console");
if (console.className == "consoleout"){
document.getElementById("console").className = "consolein";
}else{
document.getElementById("console").className = "consoleout";
}
}*/
|
}
|
images.go
|
package daemon
import (
"context"
"fmt"
"github.com/go-kit/kit/log"
"github.com/pkg/errors"
"github.com/fluxcd/flux/pkg/cluster"
"github.com/fluxcd/flux/pkg/policy"
"github.com/fluxcd/flux/pkg/resource"
"github.com/fluxcd/flux/pkg/update"
)
func (d *Daemon) pollForNewAutomatedWorkloadImages(logger log.Logger) {
logger.Log("msg", "polling for new images for automated workloads")
ctx := context.Background()
candidateWorkloads, err := d.getAllowedAutomatedResources(ctx)
if err != nil {
logger.Log("error", errors.Wrap(err, "getting unlocked automated resources"))
return
}
if len(candidateWorkloads) == 0 {
logger.Log("msg", "no automated workloads")
return
}
// Find images to check
workloads, err := d.Cluster.SomeWorkloads(ctx, candidateWorkloads.IDs())
if err != nil {
logger.Log("error", errors.Wrap(err, "checking workloads for new images"))
return
}
// Check the latest available image(s) for each workload
imageRepos, err := update.FetchImageRepos(d.Registry, clusterContainers(workloads), logger)
if err != nil {
logger.Log("error", errors.Wrap(err, "fetching image updates"))
return
}
// Queue an image refresh job for each container; the results of these jobs
// will most likely only become available during the next polling run
for _, containers := range clusterContainers(workloads) {
for _, container := range containers.ContainersOrNil() {
d.ImageRefresh <- container.Image.Name
}
}
changes := calculateChanges(logger, candidateWorkloads, workloads, imageRepos)
if len(changes.Changes) > 0 {
d.UpdateManifests(ctx, update.Spec{Type: update.Auto, Spec: changes})
}
}
type resources map[resource.ID]resource.Resource
func (r resources) IDs() (ids []resource.ID) {
for k := range r {
ids = append(ids, k)
}
return ids
}
// getAllowedAutomatedResources returns all the resources that are
// automated but do not have policies set to restrain them from
// getting updated.
func (d *Daemon) getAllowedAutomatedResources(ctx context.Context) (resources, error) {
resources, _, err := d.getResources(ctx)
if err != nil {
return nil, err
}
result := map[resource.ID]resource.Resource{}
for _, resource := range resources {
policies := resource.Policies()
if policies.Has(policy.Automated) && !policies.Has(policy.Locked) && !policies.Has(policy.Ignore) {
result[resource.ResourceID()] = resource
}
}
return result, nil
}
func calculateChanges(logger log.Logger, candidateWorkloads resources, workloads []cluster.Workload, imageRepos update.ImageRepos) *update.Automated
|
{
changes := &update.Automated{}
for _, workload := range workloads {
var p policy.Set
if resource, ok := candidateWorkloads[workload.ID]; ok {
p = resource.Policies()
}
containers:
for _, container := range workload.ContainersOrNil() {
currentImageID := container.Image
pattern := policy.GetTagPattern(p, container.Name)
repo := currentImageID.Name
logger := log.With(logger, "workload", workload.ID, "container", container.Name, "repo", repo, "pattern", pattern, "current", currentImageID)
repoMetadata := imageRepos.GetRepositoryMetadata(repo)
images, err := update.FilterAndSortRepositoryMetadata(repoMetadata, pattern)
if err != nil {
logger.Log("warning", fmt.Sprintf("inconsistent repository metadata: %s", err), "action", "skip container")
continue containers
}
if latest, ok := images.Latest(); ok && latest.ID != currentImageID {
if latest.ID.Tag == "" {
logger.Log("warning", "untagged image in available images", "action", "skip container")
continue containers
}
current := repoMetadata.FindImageWithRef(currentImageID)
if pattern.RequiresTimestamp() && (current.CreatedAt.IsZero() || latest.CreatedAt.IsZero()) {
logger.Log("warning", "image with zero created timestamp", "current", fmt.Sprintf("%s (%s)", current.ID, current.CreatedAt), "latest", fmt.Sprintf("%s (%s)", latest.ID, latest.CreatedAt), "action", "skip container")
continue containers
}
newImage := currentImageID.WithNewTag(latest.ID.Tag)
changes.Add(workload.ID, container, newImage)
logger.Log("info", "added update to automation run", "new", newImage, "reason", fmt.Sprintf("latest %s (%s) > current %s (%s)", latest.ID.Tag, latest.CreatedAt, currentImageID.Tag, current.CreatedAt))
} else {
logger.Log("debug", "image no updates found", "latest", latest.ID, "createdAt", latest.CreatedAt)
}
}
}
return changes
}
|
|
p0086_partition_list.rs
|
#![allow(dead_code)]
pub struct
|
;
use crate::util::ListNode;
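// Approach: walk the list once, moving each node onto one of two partial lists
// ("left" for values < x, "right" for the rest), then splice "right" onto the tail
// of "left". Relative order within each partition is preserved, as the problem requires.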
impl Solution {
pub fn partition(head: Option<Box<ListNode>>, x: i32) -> Option<Box<ListNode>> {
let mut left: Option<Box<ListNode>> = None;
let mut right: Option<Box<ListNode>> = None;
let mut lnext = &mut left;
let mut rnext = &mut right;
let mut head = head;
while let Some(mut node) = head {
head = node.next.take();
if node.val < x {
*lnext = Some(node);
lnext = &mut lnext.as_mut().unwrap().next;
} else {
*rnext = Some(node);
rnext = &mut rnext.as_mut().unwrap().next;
}
}
*lnext = right;
left
}
}
#[cfg(test)]
mod tests {
use super::*;
fn to_list(v: Vec<i32>) -> Option<Box<ListNode>> {
let mut cur = None;
for &x in v.iter().rev() {
let mut n = ListNode::new(x);
n.next = cur;
cur = Some(Box::new(n));
}
cur
}
#[test]
fn test_partition() {
assert_eq!(
Solution::partition(to_list(vec![1, 4, 3, 2, 5, 2]), 3),
to_list(vec![1, 2, 2, 4, 3, 5])
);
}
}
|
Solution
|
privacy-service.service.ts
|
/*
* Copyright 2020 Merck Sharp & Dohme Corp. a subsidiary of Merck & Co.,
* Inc., Kenilworth, NJ, USA.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Injectable } from '@angular/core';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { FormGroup } from '@angular/forms';
@Injectable({
providedIn: 'root'
})
export class PrivacyService {
constructor(private _httpClient: HttpClient) { }
|
getAllPrivacyRules() {
return this._httpClient.get('/admin/dataprivacyrules');
}
addPrivacyRule(privacyForm: FormGroup, order, editor) {
const params = {
data_field: privacyForm.get('dataElement').value,
can_store: !!privacyForm.get('privacySettings').value,
order: order,
editor: editor
};
return this._httpClient.post('/admin/dataprivacyrules', params);
}
editPrivacyRule(privacyForm: FormGroup, order, editor, id) {
const params = {
data_field: privacyForm.get('dataElement').value,
can_store: privacyForm.get('privacySettings').value,
editor: editor
};
return this._httpClient.patch(`/admin/dataprivacyrules/${id}`, params);
}
deletePrivacyRule(id, editor) {
const params = {
editor: editor
};
const httpOptions = {
headers: new HttpHeaders(),
body: params
};
return this._httpClient.delete(`/admin/dataprivacyrules/${id}`, httpOptions);
}
reorderPrivacyRule(idArray, editor) {
const params = {
rulesordered: idArray,
editor: editor
};
return this._httpClient.patch('/admin/dataprivacyrules', params);
}
getPrivacyRule(id) {
return this._httpClient.get(`/admin/dataprivacyrules/${id}`);
}
getDataElement() {
return this._httpClient.get('/admin/dataprivacyrules/config/datafields');
}
getPrivacyRulesHistory() {
return this._httpClient.get('/admin/dataprivacyrules/history');
}
}
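// Minimal usage sketch (illustrative, not part of this service): an injecting component
// holding a FormGroup with 'dataElement' and 'privacySettings' controls could call
//   this.privacyService.addPrivacyRule(this.privacyForm, 1, 'editor@example.com')
//     .subscribe(rule => console.log(rule));
// where `privacyService` is an instance of this class provided through Angular's DI.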
| |
locations.js
|
import api from '../services/api-service'
import { formatDate } from '../helpers/date'
import config from '../config/api-config'
class
|
{
constructor(api, helpers, config) {
this.api = api
this.countries = null
this.cities = null
this.shortCitiesList = {}
this.airlines = null
this.formatDate = helpers.formatDate
this.lastSearch = {}
this.ticketsObj = {}
this.urlImg = config.urlImg
}
async init() {
const response = await Promise.all([
this.api.countries(),
this.api.cities(),
this.api.airlines()
])
const [countries, cities, airlines] = response
this.countries = this.serializeCountries(countries)
this.cities = this.serializeCities(cities)
this.shortCitiesList = this.createShortCitiesList(this.cities)
this.airlines = this.serializeAirlines(airlines)
return response
}
getCityCodeByKey(key) {
const city = Object.values(this.cities).find((item) => item.full_name === key)
return city.code
}
getCityNameByCode(code) {
return this.cities[code].name
}
getAirlineNameByCode(code) {
return this.airlines[code] ? this.airlines[code].name : ''
}
getAirlineLogoByCode(code) {
return this.airlines[code] ? this.airlines[code].logo : ''
}
createShortCitiesList(cities) {
return Object.entries(cities).reduce((acc, [, city]) => {
acc[city.full_name] = null
return acc
}, { })
}
serializeAirlines(airlines) {
return airlines.reduce((acc, item) => {
item.logo = `${this.urlImg}/${item.code}.png`
item.name = item.name || item.name_translations.en
acc[item.code] = item
return acc
},{})
}
serializeCountries(countries) {
//{ 'Country code': {...} }
return countries.reduce((acc, country) => {
acc[country.code] = country
return acc
}, {})
}
serializeCities(cities) {
// { 'City name, Country name': {...} }
return cities.reduce((acc, city) => {
const country_name = this.countries[city.country_code].name
city.name = city.name || city.name_translations.en
const full_name = `${city.name}, ${country_name}`
acc[city.code] = {
...city,
country_name,
full_name
}
return acc
}, {})
}
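// e.g. (illustrative values) cities['MOW'] ->
// { code: 'MOW', name: 'Moscow', country_code: 'RU', country_name: 'Russia', full_name: 'Moscow, Russia', ... }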
serializeTickets(tickets) {
return Object.values(tickets).map(ticket => {
return {
...ticket,
_id: Math.random() * 10 / 25,
origin_name: this.getCityNameByCode(ticket.origin),
destination_name: this.getCityNameByCode(ticket.destination),
airline_logo: this.getAirlineLogoByCode(ticket.airline),
airline_name: this.getAirlineNameByCode(ticket.airline),
departure_at: this.formatDate(ticket.departure_at, 'dd MMM yyyy hh:mm'),
return_at: this.formatDate(ticket.return_at, 'dd MMM yyyy hh:mm'),
}
})
}
convertTicketsToObject(tickets) {
this.ticketsObj = tickets.reduce((acc, elem) => {
acc[elem._id] = elem
return acc
}, {})
}
async fetchTickets(params) {
const response = await this.api.prices(params)
return this.lastSearch = this.serializeTickets(response.data)
}
}
const locations = new Locations(api, { formatDate }, config)
export default locations
// { 'City, Country': null }
// [{}, {}]
// { 'city': {...} } => cities[code]
|
Locations
|
Groups.js
|
'use strict';
const validateGroupInput = require('./validation/group');
const _ = require('lodash');
/**
* Groups controller
*/
module.exports = {
/**
* GET /groups handler
* Returns a list of available groups
* @param {Object} ctx - koa context
*/
async getGroups(ctx) {
const service = strapi.plugins['content-type-builder'].services.groups;
const data = service.getGroups();
ctx.send({ data });
},
/**
* GET /groups/:uid
* Returns a specific group
* @param {Object} ctx - koa context
*/
async getGroup(ctx) {
const { uid } = ctx.params;
const service = strapi.plugins['content-type-builder'].services.groups;
const group = service.getGroup(uid);
if (!group) {
return ctx.send({ error: 'group.notFound' }, 404);
}
ctx.send({ data: group });
},
/**
* POST /groups
* Creates a group and returns its info
* @param {Object} ctx - koa context
*/
async createGroup(ctx) {
const { body } = ctx.request;
try {
await validateGroupInput(body);
} catch (error) {
return ctx.send({ error }, 400);
}
const service = strapi.plugins['content-type-builder'].services.groups;
const uid = service.createGroupUID(body.name);
if (service.getGroup(uid)) {
return ctx.send({ error: 'group.alreadyExists' }, 400);
}
strapi.reload.isWatching = false;
const newGroup = await service.createGroup(uid, body);
strapi.reload();
ctx.send({ data: newGroup }, 201);
},
/**
* PUT /groups/:uid
* Updates a group and returns its info
* @param {Object} ctx - koa context
*/
async updateGroup(ctx) {
const { uid } = ctx.params;
const { body } = ctx.request;
const service = strapi.plugins['content-type-builder'].services.groups;
const group = service.getGroup(uid);
if (!group) {
return ctx.send({ error: 'group.notFound' }, 404);
}
|
if (body.attributes[attribute].default === '') {
body.attributes[attribute].default = undefined;
}
});
}
try {
await validateGroupInput(body);
} catch (error) {
return ctx.send({ error }, 400);
}
strapi.reload.isWatching = false;
const updatedGroup = await service.updateGroup(group, body);
await service.updateGroupInModels(group.uid, updatedGroup.uid);
strapi.reload();
ctx.send({ data: updatedGroup }, 200);
},
/**
* DELETE /groups/:uid
* Deletes a group and returns its old info
* @param {Object} ctx - koa context
*/
async deleteGroup(ctx) {
const { uid } = ctx.params;
const service = strapi.plugins['content-type-builder'].services.groups;
const group = service.getGroup(uid);
if (!group) {
return ctx.send({ error: 'group.notFound' }, 404);
}
strapi.reload.isWatching = false;
await service.deleteGroup(group);
await service.deleteGroupInModels(group.uid);
strapi.reload();
ctx.send({ data: { uid } }, 200);
},
};
|
// convert zero-length strings on default attributes to undefined
if (_.has(body, 'attributes')) {
Object.keys(body.attributes).forEach(attribute => {
|
make_changelog.py
|
#!/usr/bin/python3
"""
Generate a markdown changelog for the rclone project
"""
import os
import sys
import re
import datetime
import subprocess
from collections import defaultdict
IGNORE_RES = [
r"^Add .* to contributors$",
r"^Start v\d+\.\d+(\.\d+)?-DEV development$",
r"^Version v\d+\.\d+(\.\d+)?$",
]
IGNORE_RE = re.compile("(?:" + "|".join(IGNORE_RES) + ")")
CATEGORY = re.compile(r"(^[\w/ ]+(?:, *[\w/ ]+)*):\s*(.*)$")
backends = [ x for x in os.listdir("backend") if x != "all"]
backend_aliases = {
"amazon cloud drive" : "amazonclouddrive",
"acd" : "amazonclouddrive",
"google cloud storage" : "googlecloudstorage",
"gcs" : "googlecloudstorage",
"azblob" : "azureblob",
"mountlib": "mount",
"cmount": "mount",
"mount/cmount": "mount",
}
backend_titles = {
"amazonclouddrive": "Amazon Cloud Drive",
"googlecloudstorage": "Google Cloud Storage",
"azureblob": "Azure Blob",
"ftp": "FTP",
"sftp": "SFTP",
"http": "HTTP",
"webdav": "WebDAV",
}
STRIP_FIX_RE = re.compile(r"(\s+-)?\s+((fixes|addresses)\s+)?#\d+", flags=re.I)
STRIP_PATH_RE = re.compile(r"^(backend|fs)/")
IS_FIX_RE = re.compile(r"\b(fix|fixes)\b", flags=re.I)
def make_out(data, indent=""):
"""Return a out, lines the first being a function for output into the second"""
out_lines = []
def out(category, title=None):
if title is None:
title = category
lines = data.get(category)
if not lines:
return
del data[category]
if indent != "" and len(lines) == 1:
out_lines.append(indent+"* " + title+": " + lines[0])
return
out_lines.append(indent+"* " + title)
for line in lines:
out_lines.append(indent+" * " + line)
return out, out_lines
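# Example (illustrative): out, lines = make_out({"mount": ["Fix crash on unmount (alice)"]})
# then out("mount", title="Mount") removes "mount" from the dict and appends
# "* Mount" followed by " * Fix crash on unmount (alice)" to `lines`.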
def
|
(log):
"""Process the incoming log into a category dict of lists"""
by_category = defaultdict(list)
for log_line in reversed(log.split("\n")):
log_line = log_line.strip()
hash, author, timestamp, message = log_line.split("|", 3)
message = message.strip()
if IGNORE_RE.search(message):
continue
match = CATEGORY.search(message)
categories = "UNKNOWN"
if match:
categories = match.group(1).lower()
message = match.group(2)
message = STRIP_FIX_RE.sub("", message)
message = message +" ("+author+")"
message = message[0].upper()+message[1:]
seen = set()
for category in categories.split(","):
category = category.strip()
category = STRIP_PATH_RE.sub("", category)
category = backend_aliases.get(category, category)
if category in seen:
continue
by_category[category].append(message)
seen.add(category)
#print category, hash, author, timestamp, message
return by_category
def main():
if len(sys.argv) != 3:
print("Syntax: %s vX.XX vX.XY" % sys.argv[0], file=sys.stderr)
sys.exit(1)
version, next_version = sys.argv[1], sys.argv[2]
log = subprocess.check_output(["git", "log", '''--pretty=format:%H|%an|%aI|%s'''] + [version+".."+next_version])
log = log.decode("utf-8")
by_category = process_log(log)
# Output backends first so remaining in by_category are core items
out, backend_lines = make_out(by_category)
out("mount", title="Mount")
out("vfs", title="VFS")
out("local", title="Local")
out("cache", title="Cache")
out("crypt", title="Crypt")
backend_names = sorted(x for x in list(by_category.keys()) if x in backends)
for backend_name in backend_names:
if backend_name in backend_titles:
backend_title = backend_titles[backend_name]
else:
backend_title = backend_name.title()
out(backend_name, title=backend_title)
# Split remaining in by_category into new features and fixes
new_features = defaultdict(list)
bugfixes = defaultdict(list)
for name, messages in by_category.items():
for message in messages:
if IS_FIX_RE.search(message):
bugfixes[name].append(message)
else:
new_features[name].append(message)
# Output new features
out, new_features_lines = make_out(new_features, indent=" ")
for name in sorted(new_features.keys()):
out(name)
# Output bugfixes
out, bugfix_lines = make_out(bugfixes, indent=" ")
for name in sorted(bugfixes.keys()):
out(name)
# Read old changelog and split
with open("docs/content/changelog.md") as fd:
old_changelog = fd.read()
heading = "# Changelog"
i = old_changelog.find(heading)
if i < 0:
raise AssertionError("Couldn't find heading in old changelog")
i += len(heading)
old_head, old_tail = old_changelog[:i], old_changelog[i:]
# Update the build date
old_head = re.sub(r"\d\d\d\d-\d\d-\d\d", str(datetime.date.today()), old_head)
# Output combined changelog with new part
sys.stdout.write(old_head)
sys.stdout.write("""
## %s - %s
* New backends
* New commands
* New Features
%s
* Bug Fixes
%s
%s""" % (next_version, datetime.date.today(), "\n".join(new_features_lines), "\n".join(bugfix_lines), "\n".join(backend_lines)))
sys.stdout.write(old_tail)
if __name__ == "__main__":
main()
|
process_log
|
ktgr_action3.py
|
import httpx
from anilist.types import Anime
from pyrogram import filters
from pyrogram.types import CallbackQuery
from pyromod.helpers import ikb
from pyromod.nav import Pagination
from amime.amime import Amime
@Amime.on_callback_query(filters.regex(r"^tv_mahousj3 anime (?P<page>\d+)"))
async def anime_suggestions(bot: Amime, callback: CallbackQuery):
page = int(callback.matches[0]["page"])
message = callback.message
lang = callback._lang
keyboard = []
async with httpx.AsyncClient(http2=True) as client:
response = await client.post(
url="https://graphql.anilist.co",
json=dict(
query="""
query($per_page: Int) {
Page(page: 4, perPage: $per_page) {
media(type: ANIME, format: TV, sort: TRENDING_DESC, status: FINISHED, genre: "mahou shoujo") {
id
title {
romaji
english
native
}
siteUrl
}
}
}
""",
variables=dict(
per_page=100,
),
),
|
"Accept": "application/json",
},
)
data = response.json()
await client.aclose()
if data["data"]:
items = data["data"]["Page"]["media"]
suggestions = [
Anime(id=item["id"], title=item["title"], url=item["siteUrl"])
for item in items
]
layout = Pagination(
suggestions,
item_data=lambda i, pg: f"menu {i.id}",
item_title=lambda i, pg: i.title.romaji,
page_data=lambda pg: f"tv_mahousj3 anime {pg}",
)
lines = layout.create(page, lines=8)
if len(lines) > 0:
keyboard += lines
keyboard.append([(lang.Prev, "tv_mahousj2 anime 1"), (lang.Next, "tv_mahousj4 anime 1")])
keyboard.append([(lang.back_button, "ktgr-finish")])
await message.edit_text(
lang.suggestions_text,
reply_markup=ikb(keyboard),
)
|
headers={
"Content-Type": "application/json",
|
model_printer.go
|
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package explain
import (
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/kube-openapi/pkg/util/proto"
)
// fieldIndentLevel is the level of indentation for fields.
const fieldIndentLevel = 3
// descriptionIndentLevel is the level of indentation for the
// description.
const descriptionIndentLevel = 5
// modelPrinter prints a schema in Writer. Its "Builder" will decide if
// it's recursive or not.
type modelPrinter struct {
Name string
Type string
Descriptions []string
Writer *Formatter
Builder fieldsPrinterBuilder
GVK schema.GroupVersionKind
Error error
}
var _ proto.SchemaVisitor = &modelPrinter{}
func (m *modelPrinter) PrintKindAndVersion() error {
if err := m.Writer.Write("KIND: %s", m.GVK.Kind); err != nil {
return err
}
return m.Writer.Write("VERSION: %s\n", m.GVK.GroupVersion())
}
// PrintDescription prints the description for a given schema. There
// might be multiple descriptions, since we collect descriptions when we
// go through references, arrays and maps.
func (m *modelPrinter) PrintDescription(schema proto.Schema) error {
if err := m.Writer.Write("DESCRIPTION:"); err != nil {
return err
}
empty := true
for i, desc := range append(m.Descriptions, schema.GetDescription()) {
if desc == "" {
continue
}
empty = false
if i != 0 {
if err := m.Writer.Write(""); err != nil {
return err
}
}
if err := m.Writer.Indent(descriptionIndentLevel).WriteWrapped(desc); err != nil {
return err
}
}
if empty {
return m.Writer.Indent(descriptionIndentLevel).WriteWrapped("<empty>")
}
return nil
}
// VisitArray recurses inside the subtype, while collecting the type if
// not done yet, and the description.
func (m *modelPrinter) VisitArray(a *proto.Array) {
m.Descriptions = append(m.Descriptions, a.GetDescription())
if m.Type == "" {
m.Type = GetTypeName(a)
}
a.SubType.Accept(m)
}
// VisitKind prints a full resource with its fields.
func (m *modelPrinter) VisitKind(k *proto.Kind) {
if err := m.PrintKindAndVersion(); err != nil {
m.Error = err
return
}
if m.Type == "" {
m.Type = GetTypeName(k)
}
if m.Name != "" {
m.Writer.Write("RESOURCE: %s <%s>\n", m.Name, m.Type)
}
if err := m.PrintDescription(k); err != nil {
m.Error = err
return
}
if err := m.Writer.Write("\nFIELDS:"); err != nil {
m.Error = err
return
}
m.Error = m.Builder.BuildFieldsPrinter(m.Writer.Indent(fieldIndentLevel)).PrintFields(k)
}
// VisitMap recurses inside the subtype, while collecting the type if
// not done yet, and the description.
func (m *modelPrinter) VisitMap(om *proto.Map) {
m.Descriptions = append(m.Descriptions, om.GetDescription())
if m.Type == "" {
m.Type = GetTypeName(om)
}
om.SubType.Accept(m)
}
// VisitPrimitive prints a field type and its description.
func (m *modelPrinter) VisitPrimitive(p *proto.Primitive) {
if err := m.PrintKindAndVersion(); err != nil {
m.Error = err
return
}
if m.Type == "" {
m.Type = GetTypeName(p)
}
if err := m.Writer.Write("FIELD: %s <%s>\n", m.Name, m.Type); err != nil {
m.Error = err
return
}
m.Error = m.PrintDescription(p)
}
func (m *modelPrinter) VisitArbitrary(a *proto.Arbitrary) {
if err := m.PrintKindAndVersion(); err != nil {
m.Error = err
return
}
|
m.Error = m.PrintDescription(a)
}
// VisitReference recurses inside the subtype, while collecting the description.
func (m *modelPrinter) VisitReference(r proto.Reference) {
m.Descriptions = append(m.Descriptions, r.GetDescription())
r.SubSchema().Accept(m)
}
// PrintModel prints the description of a schema in writer.
func PrintModel(name string, writer *Formatter, builder fieldsPrinterBuilder, schema proto.Schema, gvk schema.GroupVersionKind) error {
m := &modelPrinter{Name: name, Writer: writer, Builder: builder, GVK: gvk}
schema.Accept(m)
return m.Error
}
| |
get_max_instances.go
|
package cloudcallcenter
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/CRORCR/alibaba-cloud-sdk-go/sdk/requests"
"github.com/CRORCR/alibaba-cloud-sdk-go/sdk/responses"
)
// GetMaxInstances invokes the cloudcallcenter.GetMaxInstances API synchronously
// api document: https://help.aliyun.com/api/cloudcallcenter/getmaxinstances.html
func (client *Client) GetMaxInstances(request *GetMaxInstancesRequest) (response *GetMaxInstancesResponse, err error) {
response = CreateGetMaxInstancesResponse()
err = client.DoAction(request, response)
return
}
// GetMaxInstancesWithChan invokes the cloudcallcenter.GetMaxInstances API asynchronously
// api document: https://help.aliyun.com/api/cloudcallcenter/getmaxinstances.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) GetMaxInstancesWithChan(request *GetMaxInstancesRequest) (<-chan *GetMaxInstancesResponse, <-chan error) {
responseChan := make(chan *GetMaxInstancesResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.GetMaxInstances(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
// GetMaxInstancesWithCallback invokes the cloudcallcenter.GetMaxInstances API asynchronously
// api document: https://help.aliyun.com/api/cloudcallcenter/getmaxinstances.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) GetMaxInstancesWithCallback(request *GetMaxInstancesRequest, callback func(response *GetMaxInstancesResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *GetMaxInstancesResponse
var err error
defer close(result)
response, err = client.GetMaxInstances(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
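// Example (illustrative sketch, not part of the generated API): consuming the
// channel-based variant. The client construction via NewClientWithAccessKey and
// the concrete region/credentials below are assumptions for illustration only.
//
//	client, err := NewClientWithAccessKey("cn-hangzhou", "<accessKeyId>", "<accessKeySecret>")
//	if err != nil {
//		panic(err)
//	}
//	respChan, errChan := client.GetMaxInstancesWithChan(CreateGetMaxInstancesRequest())
//	select {
//	case resp := <-respChan:
//		fmt.Println(resp.Count)
//	case err := <-errChan:
//		fmt.Println(err)
//	}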
// GetMaxInstancesRequest is the request struct for api GetMaxInstances
type GetMaxInstancesRequest struct {
*requests.RpcRequest
}
// GetMaxInstancesResponse is the response struct for api GetMaxInstances
type GetMaxInstancesResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
Success bool `json:"Success" xml:"Success"`
Code string `json:"Code" xml:"Code"`
Message string `json:"Message" xml:"Message"`
HttpStatusCode int `json:"HttpStatusCode" xml:"HttpStatusCode"`
Count int `json:"Count" xml:"Count"`
}
// CreateGetMaxInstancesRequest creates a request to invoke GetMaxInstances API
func CreateGetMaxInstancesRequest() (request *GetMaxInstancesRequest)
|
// CreateGetMaxInstancesResponse creates a response to parse from GetMaxInstances response
func CreateGetMaxInstancesResponse() (response *GetMaxInstancesResponse) {
response = &GetMaxInstancesResponse{
BaseResponse: &responses.BaseResponse{},
}
return
}
|
{
request = &GetMaxInstancesRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("CloudCallCenter", "2017-07-05", "GetMaxInstances", "", "")
request.Method = requests.POST
return
}
|
ngrams.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 19 15:03:36 2021
@author: tech
"""
import argparse
import pickle
from collections import Counter
from itertools import tee
from typing import Any, Iterable, Iterator
import spacy
import zstandard as zstd
from spacy.matcher import Matcher
from tqdm import tqdm
from scicopia_tools.db.arango import DbAccess, setup
from scicopia_tools.exceptions import ScicopiaException
# "ADJ": "adjective",
# "ADV": "adverb",
# "NOUN": "noun",
# "PROPN": "proper noun"
# "VERB": "verb",
# "X": "other"
ngram_masks = {
1: [
[ # e.g. "meta-algorithm"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "state-of-the-art"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADP"},
{"ORTH": "-"},
{"POS": "DET"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
],
2: [
[ # e.g. "random variable"
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "space station", "Nash equilibrium"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "long-term memory"
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "tabular grid-world",
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "human-like AI", "fault-tolerant setting"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "zero-shot transfer"
{"POS": "NUM"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "Alzheimer's disease"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": "PART"},
{"POS": "NOUN"},
],
[ # e.g. "semi-supervised learning"
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "grid-world environments"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "query auto-completion"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "continuous-state puddle-world"
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "Simulation-to-real transfer"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADP"},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "stop-and-go traffic"
{"POS": "VERB"},
{"ORTH": "-"},
{"POS": "CCONJ"},
{"ORTH": "-"},
{"POS": "VERB"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "state-of-the-art performance"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADP"},
{"ORTH": "-"},
{"POS": "DET"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "optimism-in-face-of-uncertainty principle"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADP"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADP"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "challenging environments"
{"DEP": "amod"},
{"DEP": "pobj"},
],
[ # e.g. "dynamic programming"
{"DEP": "amod"},
{"DEP": "conj"},
],
[ # e.g. "machine learning"
{"DEP": "compound"},
{"DEP": "pobj"},
],
[ # e.g. "Q-learning"
{"DEP": "compound"},
{"ORTH": "-"},
{"DEP": "nsubj"},
],
],
3: [
[ # e.g. "atomic electronic structure",
# "temporally abstract actions"
{"POS": {"IN": ["ADJ", "ADV"]}},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "non-linear dynamical models",
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "new query auto-completion"
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "model-free control options",
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "complex multi-vehicle interactions",
{"POS": "ADJ"},
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "preliminary Gazebo experiments", "human world record"
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "air traffic control"
# "Monte Carlo regression"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "out-of-distribution traffic densities"
{"POS": "SCONJ"},
{"ORTH": "-"},
{"POS": "ADP"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "X-ray free-electron laser"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "Sim-to-real transfer systems"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADP"},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "state-of-the-art reinforcement learning"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADP"},
{"ORTH": "-"},
{"POS": "DET"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "reinforcement learning technique"
{"DEP": "compound"},
{"DEP": "compound"},
{"DEP": "dobj"},
],
[ # e.g. "reinforcement learning strategies"
{"DEP": "compound"},
{"DEP": "compound"},
{"DEP": "nsubjpass"},
],
[ # e.g. "deep reinforcement learning"
{"DEP": "amod"},
{"DEP": "compound"},
{"DEP": "nsubj"},
],
[ # e.g. "continually shrinking segment"
{"DEP": "advmod"},
{"DEP": "amod"},
{"DEP": "pobj"},
],
[ # e.g. "deepening A*"
# "A" and "*" are treated separately
{"DEP": "compound"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
],
4: [
[ # e.g. "deep Reinforcement Learning methods",
# "experimental charge state distributions"
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "dense commercial air traffic",
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "modular deep neural network",
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "previously unknown thermodynamic cycle",
{"POS": "ADV"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "previously unsolvable hard-exploration problems",
{"POS": "ADV"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # "Simultaneous Localization and Mapping"
{"POS": "PROPN"},
{"POS": "PROPN"},
{"POS": "CCONJ"},
{"POS": "PROPN"},
],
[ # e.g. "non-differential convex optimization problem",
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "multi-goal reinforcement learning algorithm",
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "various benchmark grid-world games",
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "asynchronous advantage actor-critic algorithm",
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "Stochastic Lower Bounds Optimization",
# "task relation learning approach"
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "RL state-feedback boundary controllers",
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "large-scale fault-tolerant quantum computation",
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "iterative deepening A*"
# "A" and "*" are treated separately
{"DEP": "amod"},
{"DEP": "compound"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
],
5: [
[
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "maximum entropy inverse reinforcement learning",
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "poly-time linear programming solution",
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "traditional Simultaneous Localization and Mapping"
{"POS": "ADJ"},
{"POS": "PROPN"},
{"POS": "PROPN"},
{"POS": "CCONJ"},
{"POS": "PROPN"},
],
[ # e.g. "small neural network control law"
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "classical approximate dynamic programming approaches"
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[
{"POS": "ADV"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # "completely differentiable deep neural network"
{"POS": "ADV"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
[ # e.g. "two-player general-sum stochastic game framework",
{"POS": "NUM"},
{"ORTH": "-"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": "ADJ"},
{"ORTH": "-"},
{"POS": "ADJ"},
{"POS": {"IN": ["NOUN", "PROPN"]}},
{"POS": {"IN": ["NOUN", "PROPN"]}},
],
],
}
def clean_ngrams(ngrams: Counter) -> Counter:
"""
    Remove all n-grams that contain clause punctuation: n-grams with
    ", " or ": " anywhere, n-grams ending in one of
    (',', ':', ';', '?', '!', '-'), and n-grams containing
    newline characters (\\n).
Examples:
"code, as well"
"Comments: Has been accepted"
Parameters
----------
ngrams : Counter
A collection of text fragments
Returns
-------
Counter
A new collection of filtered text fragments
"""
punct = [",", ":", ";", "?", "!", "-"]
cleaned_ngrams = Counter(
{
k: v
for (k, v) in ngrams.items()
            if ", " not in k and ": " not in k and k[-1] not in punct and "\n" not in k
}
)
return cleaned_ngrams
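# A quick illustration of the filter above (doctest-style, with made-up counts):
# fragments containing ", " or ending in '-' are dropped, plain n-grams stay.
#     >>> clean_ngrams(Counter({"code, as well": 3, "machine learning": 5, "state-of-the-": 1}))
#     Counter({'machine learning': 5})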
def lower_ngrams(ngrams: Counter) -> Counter:
"""
Lowercase all strings in the Counter.
Parameters
----------
ngrams : Counter
A Counter of strings, in this case n-grams
Returns
-------
Counter
A Counter of lowercased strings
"""
lowercased_ngrams = Counter()
    # A dict comprehension would silently drop counts when two keys collide
    # after lowercasing; Counter.update() sums them instead.
for k, v in ngrams.items():
lowercased_ngrams.update({k.lower(): v})
return lowercased_ngrams
def
|
(ngrams: Counter, threshold: int = 2) -> Counter:
"""
Remove all strings in the Counter with a frequency lower
than a threshold.
Parameters
----------
    ngrams : Counter
        A Counter of strings, in this case n-grams
    threshold : int, optional
        The minimum frequency an n-gram needs in order to be kept, by default 2
Returns
-------
Counter
A Counter of trimmed frequencies
"""
return Counter({k: v for k, v in ngrams.items() if v >= threshold})
def weight_ngrams(ngrams: Counter) -> Counter:
"""
    Re-weight the n-grams according to their length in tokens
    using the formula
        n**n * freq(n-gram)
    where n is the number of tokens; unigrams (n = 1) keep their
    original frequency.
Parameters
----------
ngrams : Counter
A Counter of strings, in this case n-grams
Returns
-------
Counter
A Counter of re-weighted frequencies
"""
# TODO: If Python 3.8 becomes the minimum version
# this expression can be rewritten with a walrus operator
return Counter({k: len(k.split()) ** len(k.split()) * v for k, v in ngrams.items()})
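# Worked illustration with made-up counts: a bigram's frequency is multiplied
# by 2**2, a trigram's by 3**3.
#     >>> w = weight_ngrams(Counter({"machine learning": 5, "deep reinforcement learning": 2}))
#     >>> w["machine learning"], w["deep reinforcement learning"]
#     (20, 54)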
def ngrams(iterable: Iterable, n=3):
"""
Generate an iterator returning a sequence of adjacent items
in the iterable.
s -> (s0, s1, s2), (s1, s2, s3), (s2, s3, s4), ...
A generalization of the pairwise function from
https://docs.python.org/3/library/itertools.html#itertools-recipes
Parameters
----------
iterable : Iterable
A generic Iterable containing any values.
In the context of this module, these values will be
of type 'str'.
n : int, optional
The order of the n-grams, by default 3
Returns
-------
Iterator
A sequence of adjacent items in this iterable
"""
iterables = tee(iterable, n)
for i, part in enumerate(iterables):
# Shift iterators accordingly
for _ in range(i):
next(part, None)
return zip(*iterables)
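# Example: bigrams over a whitespace-tokenized sentence.
#     >>> list(ngrams("to be or not to be".split(), n=2))
#     [('to', 'be'), ('be', 'or'), ('or', 'not'), ('not', 'to'), ('to', 'be')]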
def export_ngrams(
docs: Iterator[str], nlp: spacy.language.Language, n: str, patterns=False
) -> Counter:
"""
    Extract n-gram frequencies from a series of documents
Parameters
----------
docs : Iterator[str]
An iterator of documents, e.g. abstracts
nlp : spacy.language.Language
        A spaCy language model, e.g. en_core_web_sm
    n : str
        The order of the n-grams: a single number like "3" or an
        inclusive range like "2-3"
patterns : bool, optional
Further analysis of neighboring tokens, by default False.
If True, a spaCy matcher will be used to filter most of the stopword
combinations that might not be of interest.
        The matcher will also extract bigrams that span more than two tokens,
        like "Alzheimer's disease" and "human-like AI", while filtering most
        of the other punctuation.
Returns
-------
Counter
n-gram frequencies
Raises
------
    ValueError
        If the order of the n-grams is malformed or the 'patterns' option
        is used with n outside the range 1-5
"""
n_grams = Counter()
if "-" in n:
parts = n.split("-")
if len(parts) != 2:
raise ValueError(f"Order of n-grams has wrong format: {n}")
# Potential ValueErrors might be raised here
start = int(parts[0])
end = int(parts[1])
if start > end:
# Just switch it instead of raising an error
end, start = start, end
ns = range(start, end + 1)
else:
ns = [int(n)]
if patterns:
if not all(1 <= i <= 5 for i in ns):
raise ValueError("Patterns can only be used for n-grams with n <= 5.")
matcher = Matcher(nlp.vocab)
for i in ns:
matcher.add(f"{i}-grams", ngram_masks[i])
for doc in tqdm(nlp.pipe(docs)):
matches = matcher(doc)
candidates = (
doc[start:end].text
for _, start, end in matches
                if (start == 0 or doc[start - 1].text not in ("-",))
if (
end != len(doc)
and doc[end].text not in ("-", "*")
or end == len(doc)
)
)
# some n-grams are part of bigger m-grams and might
# start or end with a '-' because of that
n_grams.update(
c
for c in candidates
                if c[0] not in ("-", "*", "%") and not c.endswith("-")
)
else:
for doc in tqdm(nlp.pipe(docs)):
for sent in doc.sents:
for i in ns:
n_words = ngrams(sent.text.split(), n=i)
n_grams.update(list(" ".join(words) for words in n_words))
return n_grams
def zstd_pickle(filename: str, obj: Any, protocol: int = 4):
"""
Pickles a Zstandard-compressed object.
Parameters
----------
filename : str
The path to the storage destination of the file
obj : Any
Any picklable object
protocol : int, optional
Protocol to be used for pickling, by default 4.
Protocol version 4 was added in Python 3.4.
"""
cctx = zstd.ZstdCompressor(level=10)
with open(filename, "wb") as fh:
with cctx.stream_writer(fh) as compressor:
pickle.dump(obj, compressor, protocol=protocol)
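# A hypothetical counterpart for reading such a file back (not part of the
# original module); a minimal sketch that relies on the standard
# python-zstandard stream_reader API, which is sufficient for pickle's
# binary protocols.
def zstd_unpickle(filename: str) -> Any:
    """
    Load a Zstandard-compressed pickle created by zstd_pickle.
    Parameters
    ----------
    filename : str
        The path to the compressed pickle file
    Returns
    -------
    Any
        The unpickled object
    """
    dctx = zstd.ZstdDecompressor()
    with open(filename, "rb") as fh:
        with dctx.stream_reader(fh) as reader:
            return pickle.load(reader)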
def fetch_abstracts(db_access: DbAccess) -> Iterator[str]:
"""
Fetches the abstracts of all documents in a collection that have them.
Parameters
----------
db_access : DbAccess
Access to the collection of the ArangoDB database one wants to access
Returns
-------
Iterator[str]
An iterator of all available abstracts
"""
aql = f"FOR x IN {db_access.collection.name} FILTER x.abstract != null RETURN x.abstract"
docs = db_access.database.AQLQuery(aql, rawResults=True, batchSize=100, ttl=60)
return docs
if __name__ == "__main__":
    PARSER = argparse.ArgumentParser(description="Export n-gram frequencies to file")
PARSER.add_argument(
"output",
type=str,
help="Where to store the pickled n-grams",
)
PARSER.add_argument(
"--patterns",
action="store_true",
help="Use a spaCy matcher to extract n-grams. n has to be between 1 and 5.",
)
PARSER.add_argument(
"-n",
type=str,
default="2-3",
help="The order of the n-grams, by default 2-3",
)
PARSER.add_argument(
"--threshold",
"-t",
type=int,
default=0,
help="A threshold for n-gram frequencies to be kept, by default 0",
)
PARSER.add_argument(
"--weighting",
action="store_true",
help="Should the frequenies be re-weighted by their n-gram lengths?",
)
ARGS = PARSER.parse_args()
try:
arango_access = setup()
db_docs = fetch_abstracts(arango_access)
except ScicopiaException as e:
print(e)
else:
spacy_model = spacy.load("en_core_web_lg", exclude=["ner", "textcat"])
PATTERNS = ARGS.patterns
try:
frequencies = export_ngrams(db_docs, spacy_model, ARGS.n, PATTERNS)
except ValueError as e:
print(f"Value of n: {e}")
else:
frequencies = clean_ngrams(frequencies)
frequencies = lower_ngrams(frequencies)
            THRESHOLD = ARGS.threshold
            if THRESHOLD > 0:
                # Pass the user-supplied threshold; otherwise trim_ngrams
                # would fall back to its default of 2
                frequencies = trim_ngrams(frequencies, THRESHOLD)
if ARGS.weighting:
frequencies = weight_ngrams(frequencies)
zstd_pickle(ARGS.output, frequencies)
|
trim_ngrams
|
update.pb.go
|
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: update.proto
package pipeline
import (
fmt "fmt"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
_ "github.com/mwitkow/go-proto-validators"
pipeline "github.com/easyopsapis/easyops-api-go/protorepo-models/easyops/model/pipeline"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
//
// Update request
type UpdateRequest struct {
//
	// Filter pipelines by project id
ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id" form:"project_id"`
//
	// The id of the pipeline to update
PipelineId string `protobuf:"bytes,2,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id" form:"pipeline_id"`
//
	// The pipeline
Pipeline *pipeline.Pipeline `protobuf:"bytes,3,opt,name=pipeline,proto3" json:"pipeline" form:"pipeline"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *UpdateRequest) Reset() { *m = UpdateRequest{} }
func (m *UpdateRequest) String() string { return proto.CompactTextString(m) }
func (*UpdateRequest) ProtoMessage() {}
func (*UpdateRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_3f0fa214029f1c21, []int{0}
}
func (m *UpdateRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_UpdateRequest.Unmarshal(m, b)
}
func (m *UpdateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_UpdateRequest.Marshal(b, m, deterministic)
}
func (m *UpdateRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_UpdateRequest.Merge(m, src)
}
func (m *UpdateRequest) XXX_Size() int {
return xxx_messageInfo_UpdateRequest.Size(m)
}
func (m *UpdateRequest) XXX_DiscardUnknown() {
xxx_messageInfo_UpdateRequest.DiscardUnknown(m)
}
var xxx_messageInfo_UpdateRequest proto.InternalMessageInfo
func (m *UpdateRequest) GetProjectId() string {
if m != nil {
return m.ProjectId
}
return ""
}
func (m *UpdateRequest) GetPipelineId() string {
if m != nil {
return m.PipelineId
}
return ""
}
func (m *UpdateRequest) GetPipeline() *pipeline.Pipeline {
if m != nil {
return m.Pipeline
}
return nil
}
//
// Update API response
type UpdateResponseWrapper struct {
//
	// Response code
Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code" form:"code"`
//
	// Explanation of the response code
CodeExplain string `protobuf:"bytes,2,opt,name=codeExplain,proto3" json:"codeExplain" form:"codeExplain"`
//
	// Error details
Error string `protobuf:"bytes,3,opt,name=error,proto3" json:"error" form:"error"`
//
	// Response data
Data *pipeline.Pipeline `protobuf:"bytes,4,opt,name=data,proto3" json:"data" form:"data"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *UpdateResponseWrapper) Reset() { *m = UpdateResponseWrapper{} }
func (m *UpdateResponseWrapper) String() string { return proto.CompactTextString(m) }
func (*UpdateResponseWrapper) ProtoMessage() {}
func (*UpdateResponseWrapper) Descriptor() ([]byte, []int) {
return fileDescriptor_3f0fa214029f1c21, []int{1}
}
func (m *UpdateResponseWrapper) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_UpdateResponseWrapper.Unmarshal(m, b)
}
func (m *UpdateResponseWrapper) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_UpdateResponseWrapper.Marshal(b, m, deterministic)
}
func (m *UpdateResponseWrapper) XXX_Merge(src proto.Message) {
xxx_messageInfo_UpdateResponseWrapper.Merge(m, src)
}
func (m *UpdateResponseWrapper) XXX_Size() int {
return xxx_messageInfo_UpdateResponseWrapper.Size(m)
}
func (m *UpdateResponseWrapper) XXX_DiscardUnknown() {
xxx_messageInfo_UpdateResponseWrapper.DiscardUnknown(m)
}
var xxx_messageInfo_UpdateResponseWrapper proto.InternalMessageInfo
func (m *UpdateResponseWrapper) GetCode() int32 {
if m != nil {
return m.Code
}
return 0
}
func (m *UpdateResponseWrapper) GetCodeExplain() string {
if m != nil {
return m.CodeExplain
}
return ""
}
func (m *UpdateResponseWrapper) GetError() string {
if m != nil {
return m.Error
}
return ""
}
func (m *UpdateResponseWrapper) GetData() *pipeline.Pipeline {
if m != nil {
return m.Data
}
return nil
}
func init() {
proto.RegisterType((*UpdateRequest)(nil), "pipelin
|
dateRequest")
proto.RegisterType((*UpdateResponseWrapper)(nil), "pipeline.UpdateResponseWrapper")
}
func init() { proto.RegisterFile("update.proto", fileDescriptor_3f0fa214029f1c21) }
var fileDescriptor_3f0fa214029f1c21 = []byte{
// 402 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x51, 0x4b, 0xab, 0xd4, 0x30,
0x14, 0xa6, 0x3a, 0x57, 0x9c, 0xf4, 0xea, 0x1d, 0x23, 0xca, 0x30, 0x9b, 0x0e, 0x51, 0x64, 0x14,
0xda, 0xa8, 0x03, 0xbe, 0x96, 0x15, 0xc1, 0xc1, 0x8d, 0x16, 0xc4, 0x85, 0xa8, 0x64, 0x9a, 0x4c,
0xad, 0xb6, 0x3d, 0x31, 0x49, 0x1d, 0x1f, 0xf8, 0x3b, 0xdd, 0x55, 0x70, 0xeb, 0xae, 0xbf, 0x40,
0x9a, 0x3e, 0xa6, 0x82, 0x77, 0xd5, 0x73, 0xf2, 0x3d, 0xfa, 0x7d, 0x09, 0x3a, 0x2e, 0x25, 0x67,
0x46, 0x04, 0x52, 0x81, 0x01, 0x7c, 0x5e, 0xa6, 0x52, 0x64, 0x69, 0x21, 0x16, 0x7e, 0x92, 0x9a,
0xf7, 0xe5, 0x36, 0x88, 0x21, 0xa7, 0x09, 0x24, 0x40, 0x2d, 0x61, 0x5b, 0xee, 0xec, 0x66, 0x17,
0x3b, 0xb5, 0xc2, 0xc5, 0x8b, 0x04, 0x02, 0xc1, 0xf4, 0x57, 0x90, 0x3a, 0xc8, 0x20, 0x66, 0x19,
0x8d, 0xa1, 0x30, 0x8a, 0xc5, 0x46, 0xb7, 0x4a, 0x25, 0x24, 0xf8, 0x39, 0x70, 0x91, 0x69, 0xda,
0x11, 0xa9, 0x5d, 0x69, 0xff, 0xcb, 0x61, 0xe8, 0x2c, 0xef, 0x8d, 0x12, 0xe4, 0xfb, 0xd4, 0x7c,
0x84, 0x3d, 0x4d, 0xc0, 0xb7, 0xa0, 0xff, 0x99, 0x65, 0x29, 0x67, 0x06, 0x94, 0xa6, 0xc3, 0xd8,
0xea, 0xc8, 0x1f, 0x07, 0x5d, 0x78, 0x69, 0x4b, 0x45, 0xe2, 0x53, 0x29, 0xb4, 0xc1, 0x4f, 0x11,
0x92, 0x0a, 0x3e, 0x88, 0xd8, 0xbc, 0x4b, 0xf9, 0xdc, 0x59, 0x3a, 0xab, 0x69, 0x78, 0xb3, 0xae,
0xbc, 0x4b, 0x3b, 0x50, 0xf9, 0x23, 0x72, 0xc0, 0xc8, 0xef, 0x5f, 0xde, 0x0c, 0x5d, 0x7c, 0xfb,
0xfa, 0xb6, 0xff, 0x90, 0xf9, 0xdf, 0xde, 0x7c, 0xbf, 0xb3, 0xfe, 0x71, 0x3d, 0x9a, 0x76, 0x84,
0x0d, 0xc7, 0xcf, 0x90, 0xdb, 0xa7, 0x6c, 0xac, 0xce, 0x58, 0xab, 0x5b, 0x75, 0xe5, 0xe1, 0xce,
0xea, 0x00, 0xfe, 0xdf, 0x0b, 0xf5, 0x8c, 0x0d, 0xc7, 0x8f, 0xd1, 0x70, 0xdd, 0xf3, 0xb3, 0x4b,
0x67, 0xe5, 0xde, 0xc5, 0xc1, 0x70, 0x07, 0xcf, 0xbb, 0x21, 0xbc, 0x5c, 0x57, 0xde, 0xc9, 0xbf,
0xee, 0x24, 0x1a, 0x84, 0xe4, 0xa7, 0x83, 0xae, 0xf4, 0x6d, 0xb5, 0x84, 0x42, 0x8b, 0x57, 0x8a,
0x49, 0x29, 0x14, 0xbe, 0x86, 0x26, 0x31, 0x70, 0x61, 0xfb, 0x1e, 0x85, 0x27, 0x75, 0xe5, 0xb9,
0xad, 0x4d, 0x73, 0x4a, 0x22, 0x0b, 0xe2, 0x07, 0xc8, 0x6d, 0xbe, 0x4f, 0xbe, 0xc8, 0x8c, 0xa5,
0x45, 0x57, 0xe8, 0xea, 0xa1, 0xd0, 0x08, 0x24, 0xd1, 0x98, 0x8a, 0x6f, 0xa0, 0x23, 0xa1, 0x14,
0x28, 0x1b, 0x7d, 0x1a, 0xce, 0xea, 0xca, 0x3b, 0x6e, 0x35, 0xf6, 0x98, 0x44, 0x2d, 0x8c, 0xef,
0xa3, 0x09, 0x67, 0x86, 0xcd, 0x27, 0xa7, 0x36, 0x1c, 0x45, 0x6b, 0x98, 0x24, 0xb2, 0x82, 0xed,
0x39, 0xfb, 0x9c, 0xeb, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x6d, 0xf9, 0x52, 0x8f, 0xa2, 0x02,
0x00, 0x00,
}
|
e.Up
|
deserializers.go
|
// Code generated by smithy-go-codegen DO NOT EDIT.
package devopsguru
import (
"bytes"
"context"
"encoding/json"
"fmt"
"github.com/aws/aws-sdk-go-v2/aws/protocol/restjson"
"github.com/aws/aws-sdk-go-v2/service/devopsguru/types"
smithy "github.com/aws/smithy-go"
smithyio "github.com/aws/smithy-go/io"
"github.com/aws/smithy-go/middleware"
"github.com/aws/smithy-go/ptr"
smithytime "github.com/aws/smithy-go/time"
smithyhttp "github.com/aws/smithy-go/transport/http"
"io"
"math"
"strconv"
"strings"
)
type awsRestjson1_deserializeOpAddNotificationChannel struct {
}
func (*awsRestjson1_deserializeOpAddNotificationChannel) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpAddNotificationChannel) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorAddNotificationChannel(response, &metadata)
}
output := &AddNotificationChannelOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentAddNotificationChannelOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorAddNotificationChannel(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("ConflictException", errorCode):
return awsRestjson1_deserializeErrorConflictException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ServiceQuotaExceededException", errorCode):
return awsRestjson1_deserializeErrorServiceQuotaExceededException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
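// Note (illustrative, not generated code): callers typically branch on the
// modeled error types returned above with errors.As rather than comparing the
// raw error-code string, e.g.
//
//	var ade *types.AccessDeniedException
//	if errors.As(err, &ade) {
//		// handle missing permissions
//	}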
func awsRestjson1_deserializeOpDocumentAddNotificationChannelOutput(v **AddNotificationChannelOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *AddNotificationChannelOutput
if *v == nil {
sv = &AddNotificationChannelOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected NotificationChannelId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeAccountHealth struct {
}
func (*awsRestjson1_deserializeOpDescribeAccountHealth) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeAccountHealth) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeAccountHealth(response, &metadata)
}
output := &DescribeAccountHealthOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeAccountHealthOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorDescribeAccountHealth(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeAccountHealthOutput(v **DescribeAccountHealthOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeAccountHealthOutput
if *v == nil {
sv = &DescribeAccountHealthOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "MetricsAnalyzed":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumMetricsAnalyzed to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.MetricsAnalyzed = int32(i64)
}
case "OpenProactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenProactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenProactiveInsights = int32(i64)
}
case "OpenReactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenReactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenReactiveInsights = int32(i64)
}
case "ResourceHours":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected ResourceHours to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.ResourceHours = ptr.Int64(i64)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeAccountOverview struct {
}
func (*awsRestjson1_deserializeOpDescribeAccountOverview) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeAccountOverview) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeAccountOverview(response, &metadata)
}
output := &DescribeAccountOverviewOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeAccountOverviewOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorDescribeAccountOverview(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeAccountOverviewOutput(v **DescribeAccountOverviewOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeAccountOverviewOutput
if *v == nil {
sv = &DescribeAccountOverviewOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "MeanTimeToRecoverInMilliseconds":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected MeanTimeToRecoverInMilliseconds to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.MeanTimeToRecoverInMilliseconds = ptr.Int64(i64)
}
case "ProactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumProactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.ProactiveInsights = int32(i64)
}
case "ReactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumReactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.ReactiveInsights = int32(i64)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeAnomaly struct {
}
func (*awsRestjson1_deserializeOpDescribeAnomaly) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeAnomaly) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeAnomaly(response, &metadata)
}
output := &DescribeAnomalyOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeAnomalyOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorDescribeAnomaly(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeAnomalyOutput(v **DescribeAnomalyOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeAnomalyOutput
if *v == nil {
sv = &DescribeAnomalyOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "ProactiveAnomaly":
if err := awsRestjson1_deserializeDocumentProactiveAnomaly(&sv.ProactiveAnomaly, value); err != nil {
return err
}
case "ReactiveAnomaly":
if err := awsRestjson1_deserializeDocumentReactiveAnomaly(&sv.ReactiveAnomaly, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeFeedback struct {
}
func (*awsRestjson1_deserializeOpDescribeFeedback) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeFeedback) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeFeedback(response, &metadata)
}
output := &DescribeFeedbackOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeFeedbackOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorDescribeFeedback(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeFeedbackOutput(v **DescribeFeedbackOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeFeedbackOutput
if *v == nil {
sv = &DescribeFeedbackOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "InsightFeedback":
if err := awsRestjson1_deserializeDocumentInsightFeedback(&sv.InsightFeedback, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeInsight struct {
}
func (*awsRestjson1_deserializeOpDescribeInsight) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeInsight) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeInsight(response, &metadata)
}
output := &DescribeInsightOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeInsightOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorDescribeInsight(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeInsightOutput(v **DescribeInsightOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeInsightOutput
if *v == nil {
sv = &DescribeInsightOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "ProactiveInsight":
if err := awsRestjson1_deserializeDocumentProactiveInsight(&sv.ProactiveInsight, value); err != nil {
return err
}
case "ReactiveInsight":
if err := awsRestjson1_deserializeDocumentReactiveInsight(&sv.ReactiveInsight, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeOrganizationHealth struct {
}
func (*awsRestjson1_deserializeOpDescribeOrganizationHealth) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeOrganizationHealth) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeOrganizationHealth(response, &metadata)
}
output := &DescribeOrganizationHealthOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeOrganizationHealthOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
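// awsRestjson1_deserializeOpErrorDescribeOrganizationHealth buffers the error
// body so it can be re-read, derives the error code first from the
// X-Amzn-ErrorType header and then from the body via restjson.GetErrorInfo,
// rewinds the reader, and dispatches case-insensitively to the modeled
// exception deserializers, falling back to a smithy.GenericAPIError.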
func awsRestjson1_deserializeOpErrorDescribeOrganizationHealth(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeOrganizationHealthOutput(v **DescribeOrganizationHealthOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeOrganizationHealthOutput
if *v == nil {
sv = &DescribeOrganizationHealthOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
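			// Numeric members arrive as json.Number; they are parsed to int64 and
			// narrowed to the modeled int32 width (ResourceHours stays a *int64).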
case "MetricsAnalyzed":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumMetricsAnalyzed to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.MetricsAnalyzed = int32(i64)
}
case "OpenProactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenProactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenProactiveInsights = int32(i64)
}
case "OpenReactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenReactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenReactiveInsights = int32(i64)
}
case "ResourceHours":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected ResourceHours to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.ResourceHours = ptr.Int64(i64)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeOrganizationOverview struct {
}
func (*awsRestjson1_deserializeOpDescribeOrganizationOverview) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeOrganizationOverview) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeOrganizationOverview(response, &metadata)
}
output := &DescribeOrganizationOverviewOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeOrganizationOverviewOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorDescribeOrganizationOverview(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeOrganizationOverviewOutput(v **DescribeOrganizationOverviewOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeOrganizationOverviewOutput
if *v == nil {
sv = &DescribeOrganizationOverviewOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "ProactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumProactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.ProactiveInsights = int32(i64)
}
case "ReactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumReactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.ReactiveInsights = int32(i64)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeOrganizationResourceCollectionHealth struct {
}
func (*awsRestjson1_deserializeOpDescribeOrganizationResourceCollectionHealth) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeOrganizationResourceCollectionHealth) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeOrganizationResourceCollectionHealth(response, &metadata)
}
output := &DescribeOrganizationResourceCollectionHealthOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeOrganizationResourceCollectionHealthOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorDescribeOrganizationResourceCollectionHealth(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeOrganizationResourceCollectionHealthOutput(v **DescribeOrganizationResourceCollectionHealthOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeOrganizationResourceCollectionHealthOutput
if *v == nil {
sv = &DescribeOrganizationResourceCollectionHealthOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Account":
if err := awsRestjson1_deserializeDocumentAccountHealths(&sv.Account, value); err != nil {
return err
}
case "CloudFormation":
if err := awsRestjson1_deserializeDocumentCloudFormationHealths(&sv.CloudFormation, value); err != nil {
return err
}
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "Service":
if err := awsRestjson1_deserializeDocumentServiceHealths(&sv.Service, value); err != nil {
return err
}
case "Tags":
if err := awsRestjson1_deserializeDocumentTagHealths(&sv.Tags, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeResourceCollectionHealth struct {
}
func (*awsRestjson1_deserializeOpDescribeResourceCollectionHealth) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeResourceCollectionHealth) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeResourceCollectionHealth(response, &metadata)
}
output := &DescribeResourceCollectionHealthOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeResourceCollectionHealthOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorDescribeResourceCollectionHealth(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeResourceCollectionHealthOutput(v **DescribeResourceCollectionHealthOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeResourceCollectionHealthOutput
if *v == nil {
sv = &DescribeResourceCollectionHealthOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "CloudFormation":
if err := awsRestjson1_deserializeDocumentCloudFormationHealths(&sv.CloudFormation, value); err != nil {
return err
}
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "Service":
if err := awsRestjson1_deserializeDocumentServiceHealths(&sv.Service, value); err != nil {
return err
}
case "Tags":
if err := awsRestjson1_deserializeDocumentTagHealths(&sv.Tags, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpDescribeServiceIntegration struct {
}
func (*awsRestjson1_deserializeOpDescribeServiceIntegration) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpDescribeServiceIntegration) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorDescribeServiceIntegration(response, &metadata)
}
output := &DescribeServiceIntegrationOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentDescribeServiceIntegrationOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorDescribeServiceIntegration(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentDescribeServiceIntegrationOutput(v **DescribeServiceIntegrationOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *DescribeServiceIntegrationOutput
if *v == nil {
sv = &DescribeServiceIntegrationOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "ServiceIntegration":
if err := awsRestjson1_deserializeDocumentServiceIntegrationConfig(&sv.ServiceIntegration, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpGetCostEstimation struct {
}
func (*awsRestjson1_deserializeOpGetCostEstimation) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpGetCostEstimation) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorGetCostEstimation(response, &metadata)
}
output := &GetCostEstimationOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentGetCostEstimationOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorGetCostEstimation(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentGetCostEstimationOutput(v **GetCostEstimationOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *GetCostEstimationOutput
if *v == nil {
sv = &GetCostEstimationOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Costs":
if err := awsRestjson1_deserializeDocumentServiceResourceCosts(&sv.Costs, value); err != nil {
return err
}
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentCostEstimationResourceCollectionFilter(&sv.ResourceCollection, value); err != nil {
return err
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected CostEstimationStatus to be of type string, got %T instead", value)
}
sv.Status = types.CostEstimationStatus(jtv)
}
case "TimeRange":
if err := awsRestjson1_deserializeDocumentCostEstimationTimeRange(&sv.TimeRange, value); err != nil {
return err
}
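		// TotalCost may be encoded as a JSON number or as one of the string
		// forms "NaN", "Infinity", or "-Infinity" used for non-finite doubles.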
case "TotalCost":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.TotalCost = f64
case string:
var f64 float64
switch {
case strings.EqualFold(jtv, "NaN"):
f64 = math.NaN()
case strings.EqualFold(jtv, "Infinity"):
f64 = math.Inf(1)
case strings.EqualFold(jtv, "-Infinity"):
f64 = math.Inf(-1)
default:
return fmt.Errorf("unknown JSON number value: %s", jtv)
}
sv.TotalCost = f64
default:
return fmt.Errorf("expected Cost to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpGetResourceCollection struct {
}
func (*awsRestjson1_deserializeOpGetResourceCollection) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpGetResourceCollection) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorGetResourceCollection(response, &metadata)
}
output := &GetResourceCollectionOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentGetResourceCollectionOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorGetResourceCollection(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentGetResourceCollectionOutput(v **GetResourceCollectionOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *GetResourceCollectionOutput
if *v == nil {
sv = &GetResourceCollectionOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollectionFilter(&sv.ResourceCollection, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpListAnomaliesForInsight struct {
}
func (*awsRestjson1_deserializeOpListAnomaliesForInsight) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpListAnomaliesForInsight) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorListAnomaliesForInsight(response, &metadata)
}
output := &ListAnomaliesForInsightOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentListAnomaliesForInsightOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorListAnomaliesForInsight(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentListAnomaliesForInsightOutput(v **ListAnomaliesForInsightOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *ListAnomaliesForInsightOutput
if *v == nil {
sv = &ListAnomaliesForInsightOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "ProactiveAnomalies":
if err := awsRestjson1_deserializeDocumentProactiveAnomalies(&sv.ProactiveAnomalies, value); err != nil {
return err
}
case "ReactiveAnomalies":
if err := awsRestjson1_deserializeDocumentReactiveAnomalies(&sv.ReactiveAnomalies, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpListEvents struct {
}
func (*awsRestjson1_deserializeOpListEvents) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpListEvents) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorListEvents(response, &metadata)
}
output := &ListEventsOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentListEventsOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorListEvents(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentListEventsOutput(v **ListEventsOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *ListEventsOutput
if *v == nil {
sv = &ListEventsOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Events":
if err := awsRestjson1_deserializeDocumentEvents(&sv.Events, value); err != nil {
return err
}
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpListInsights struct {
}
func (*awsRestjson1_deserializeOpListInsights) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpListInsights) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorListInsights(response, &metadata)
}
output := &ListInsightsOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentListInsightsOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorListInsights(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentListInsightsOutput(v **ListInsightsOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *ListInsightsOutput
if *v == nil {
sv = &ListInsightsOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "ProactiveInsights":
if err := awsRestjson1_deserializeDocumentProactiveInsights(&sv.ProactiveInsights, value); err != nil {
return err
}
case "ReactiveInsights":
if err := awsRestjson1_deserializeDocumentReactiveInsights(&sv.ReactiveInsights, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpListNotificationChannels struct {
}
func (*awsRestjson1_deserializeOpListNotificationChannels) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpListNotificationChannels) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorListNotificationChannels(response, &metadata)
}
output := &ListNotificationChannelsOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentListNotificationChannelsOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorListNotificationChannels(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentListNotificationChannelsOutput(v **ListNotificationChannelsOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *ListNotificationChannelsOutput
if *v == nil {
sv = &ListNotificationChannelsOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Channels":
if err := awsRestjson1_deserializeDocumentChannels(&sv.Channels, value); err != nil {
return err
}
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpListOrganizationInsights struct {
}
func (*awsRestjson1_deserializeOpListOrganizationInsights) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpListOrganizationInsights) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorListOrganizationInsights(response, &metadata)
}
output := &ListOrganizationInsightsOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentListOrganizationInsightsOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorListOrganizationInsights(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentListOrganizationInsightsOutput(v **ListOrganizationInsightsOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *ListOrganizationInsightsOutput
if *v == nil {
sv = &ListOrganizationInsightsOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "ProactiveInsights":
if err := awsRestjson1_deserializeDocumentProactiveOrganizationInsights(&sv.ProactiveInsights, value); err != nil {
return err
}
case "ReactiveInsights":
if err := awsRestjson1_deserializeDocumentReactiveOrganizationInsights(&sv.ReactiveInsights, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpListRecommendations struct {
}
func (*awsRestjson1_deserializeOpListRecommendations) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpListRecommendations) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorListRecommendations(response, &metadata)
}
output := &ListRecommendationsOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentListRecommendationsOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorListRecommendations(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
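// awsRestjson1_deserializeOpDocumentListRecommendationsOutput decodes the
// ListRecommendations response document: NextToken as a string pointer and
// Recommendations via its dedicated list deserializer. Unknown keys are skipped.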
func awsRestjson1_deserializeOpDocumentListRecommendationsOutput(v **ListRecommendationsOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *ListRecommendationsOutput
if *v == nil {
sv = &ListRecommendationsOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "Recommendations":
if err := awsRestjson1_deserializeDocumentRecommendations(&sv.Recommendations, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
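// awsRestjson1_deserializeOpPutFeedback is the deserialize middleware for the
// PutFeedback operation. PutFeedback has no modeled response body, so a successful
// response only produces an empty PutFeedbackOutput; error responses are handled
// by awsRestjson1_deserializeOpErrorPutFeedback.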
type awsRestjson1_deserializeOpPutFeedback struct {
}
func (*awsRestjson1_deserializeOpPutFeedback) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpPutFeedback) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorPutFeedback(response, &metadata)
}
output := &PutFeedbackOutput{}
out.Result = output
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorPutFeedback(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("ConflictException", errorCode):
return awsRestjson1_deserializeErrorConflictException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
type awsRestjson1_deserializeOpRemoveNotificationChannel struct {
}
func (*awsRestjson1_deserializeOpRemoveNotificationChannel) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpRemoveNotificationChannel) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorRemoveNotificationChannel(response, &metadata)
}
output := &RemoveNotificationChannelOutput{}
out.Result = output
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorRemoveNotificationChannel(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("ConflictException", errorCode):
return awsRestjson1_deserializeErrorConflictException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
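// awsRestjson1_deserializeOpSearchInsights is the deserialize middleware for the
// SearchInsights operation. The response body is tee'd into a small ring buffer so
// that, if decoding fails, a snapshot of the payload can be attached to the
// returned smithy.DeserializationError for debugging.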
type awsRestjson1_deserializeOpSearchInsights struct {
}
func (*awsRestjson1_deserializeOpSearchInsights) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpSearchInsights) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorSearchInsights(response, &metadata)
}
output := &SearchInsightsOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentSearchInsightsOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorSearchInsights(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
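// awsRestjson1_deserializeOpDocumentSearchInsightsOutput decodes a SearchInsights
// response document into the pagination token, the proactive insight summaries,
// and the reactive insight summaries.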
func awsRestjson1_deserializeOpDocumentSearchInsightsOutput(v **SearchInsightsOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *SearchInsightsOutput
if *v == nil {
sv = &SearchInsightsOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "ProactiveInsights":
if err := awsRestjson1_deserializeDocumentProactiveInsights(&sv.ProactiveInsights, value); err != nil {
return err
}
case "ReactiveInsights":
if err := awsRestjson1_deserializeDocumentReactiveInsights(&sv.ReactiveInsights, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
type awsRestjson1_deserializeOpSearchOrganizationInsights struct {
}
func (*awsRestjson1_deserializeOpSearchOrganizationInsights) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpSearchOrganizationInsights) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorSearchOrganizationInsights(response, &metadata)
}
output := &SearchOrganizationInsightsOutput{}
out.Result = output
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(response.Body, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return out, metadata, err
}
err = awsRestjson1_deserializeOpDocumentSearchOrganizationInsightsOutput(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
return out, metadata, &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body with invalid JSON, %w", err),
Snapshot: snapshot.Bytes(),
}
}
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorSearchOrganizationInsights(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
func awsRestjson1_deserializeOpDocumentSearchOrganizationInsightsOutput(v **SearchOrganizationInsightsOutput, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *SearchOrganizationInsightsOutput
if *v == nil {
sv = &SearchOrganizationInsightsOutput{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "NextToken":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected UuidNextToken to be of type string, got %T instead", value)
}
sv.NextToken = ptr.String(jtv)
}
case "ProactiveInsights":
if err := awsRestjson1_deserializeDocumentProactiveInsights(&sv.ProactiveInsights, value); err != nil {
return err
}
case "ReactiveInsights":
if err := awsRestjson1_deserializeDocumentReactiveInsights(&sv.ReactiveInsights, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
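// awsRestjson1_deserializeOpStartCostEstimation is the deserialize middleware for
// the StartCostEstimation operation. Like PutFeedback, it has no modeled response
// body; only the status code is inspected before returning an empty output.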
type awsRestjson1_deserializeOpStartCostEstimation struct {
}
func (*awsRestjson1_deserializeOpStartCostEstimation) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpStartCostEstimation) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorStartCostEstimation(response, &metadata)
}
output := &StartCostEstimationOutput{}
out.Result = output
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorStartCostEstimation(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("ConflictException", errorCode):
return awsRestjson1_deserializeErrorConflictException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ResourceNotFoundException", errorCode):
return awsRestjson1_deserializeErrorResourceNotFoundException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
type awsRestjson1_deserializeOpUpdateResourceCollection struct {
}
func (*awsRestjson1_deserializeOpUpdateResourceCollection) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpUpdateResourceCollection) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorUpdateResourceCollection(response, &metadata)
}
output := &UpdateResourceCollectionOutput{}
out.Result = output
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorUpdateResourceCollection(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("ConflictException", errorCode):
return awsRestjson1_deserializeErrorConflictException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
type awsRestjson1_deserializeOpUpdateServiceIntegration struct {
}
func (*awsRestjson1_deserializeOpUpdateServiceIntegration) ID() string {
return "OperationDeserializer"
}
func (m *awsRestjson1_deserializeOpUpdateServiceIntegration) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) (
out middleware.DeserializeOutput, metadata middleware.Metadata, err error,
) {
out, metadata, err = next.HandleDeserialize(ctx, in)
if err != nil {
return out, metadata, err
}
response, ok := out.RawResponse.(*smithyhttp.Response)
if !ok {
return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)}
}
if response.StatusCode < 200 || response.StatusCode >= 300 {
return out, metadata, awsRestjson1_deserializeOpErrorUpdateServiceIntegration(response, &metadata)
}
output := &UpdateServiceIntegrationOutput{}
out.Result = output
return out, metadata, err
}
func awsRestjson1_deserializeOpErrorUpdateServiceIntegration(response *smithyhttp.Response, metadata *middleware.Metadata) error {
var errorBuffer bytes.Buffer
if _, err := io.Copy(&errorBuffer, response.Body); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)}
}
errorBody := bytes.NewReader(errorBuffer.Bytes())
errorCode := "UnknownError"
errorMessage := errorCode
code := response.Header.Get("X-Amzn-ErrorType")
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
code, message, err := restjson.GetErrorInfo(decoder)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if len(code) != 0 {
errorCode = restjson.SanitizeErrorCode(code)
}
if len(message) != 0 {
errorMessage = message
}
switch {
case strings.EqualFold("AccessDeniedException", errorCode):
return awsRestjson1_deserializeErrorAccessDeniedException(response, errorBody)
case strings.EqualFold("ConflictException", errorCode):
return awsRestjson1_deserializeErrorConflictException(response, errorBody)
case strings.EqualFold("InternalServerException", errorCode):
return awsRestjson1_deserializeErrorInternalServerException(response, errorBody)
case strings.EqualFold("ThrottlingException", errorCode):
return awsRestjson1_deserializeErrorThrottlingException(response, errorBody)
case strings.EqualFold("ValidationException", errorCode):
return awsRestjson1_deserializeErrorValidationException(response, errorBody)
default:
genericError := &smithy.GenericAPIError{
Code: errorCode,
Message: errorMessage,
}
return genericError
}
}
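// awsRestjson1_deserializeOpHttpBindingsInternalServerException reads the
// HTTP-bound members of an InternalServerException. The Retry-After header, when
// present, is trimmed and parsed as an integer (base inferred by strconv.ParseInt)
// into RetryAfterSeconds.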
func awsRestjson1_deserializeOpHttpBindingsInternalServerException(v *types.InternalServerException, response *smithyhttp.Response) error {
if v == nil {
return fmt.Errorf("unsupported deserialization for nil %T", v)
}
if headerValues := response.Header.Values("Retry-After"); len(headerValues) != 0 {
headerValues[0] = strings.TrimSpace(headerValues[0])
vv, err := strconv.ParseInt(headerValues[0], 0, 32)
if err != nil {
return err
}
v.RetryAfterSeconds = int32(vv)
}
return nil
}
func awsRestjson1_deserializeOpHttpBindingsThrottlingException(v *types.ThrottlingException, response *smithyhttp.Response) error {
if v == nil {
return fmt.Errorf("unsupported deserialization for nil %T", v)
}
if headerValues := response.Header.Values("Retry-After"); len(headerValues) != 0 {
headerValues[0] = strings.TrimSpace(headerValues[0])
vv, err := strconv.ParseInt(headerValues[0], 0, 32)
if err != nil {
return err
}
v.RetryAfterSeconds = int32(vv)
}
return nil
}
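// awsRestjson1_deserializeErrorAccessDeniedException builds a
// *types.AccessDeniedException from an error response body. The body is expected
// to be a JSON object carrying the modeled error members, e.g. a payload shaped
// like {"Message":"not authorized"} (illustrative only); decoding is delegated to
// the document deserializer for the same shape.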
func awsRestjson1_deserializeErrorAccessDeniedException(response *smithyhttp.Response, errorBody *bytes.Reader) error {
output := &types.AccessDeniedException{}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
err := awsRestjson1_deserializeDocumentAccessDeniedException(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
return output
}
func awsRestjson1_deserializeErrorConflictException(response *smithyhttp.Response, errorBody *bytes.Reader) error {
output := &types.ConflictException{}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
err := awsRestjson1_deserializeDocumentConflictException(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
return output
}
func awsRestjson1_deserializeErrorInternalServerException(response *smithyhttp.Response, errorBody *bytes.Reader) error {
output := &types.InternalServerException{}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
err := awsRestjson1_deserializeDocumentInternalServerException(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if err := awsRestjson1_deserializeOpHttpBindingsInternalServerException(output, response); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to decode response error with invalid HTTP bindings, %w", err)}
}
return output
}
func awsRestjson1_deserializeErrorResourceNotFoundException(response *smithyhttp.Response, errorBody *bytes.Reader) error {
output := &types.ResourceNotFoundException{}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
err := awsRestjson1_deserializeDocumentResourceNotFoundException(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
return output
}
func awsRestjson1_deserializeErrorServiceQuotaExceededException(response *smithyhttp.Response, errorBody *bytes.Reader) error {
output := &types.ServiceQuotaExceededException{}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
err := awsRestjson1_deserializeDocumentServiceQuotaExceededException(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
return output
}
func awsRestjson1_deserializeErrorThrottlingException(response *smithyhttp.Response, errorBody *bytes.Reader) error {
output := &types.ThrottlingException{}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
err := awsRestjson1_deserializeDocumentThrottlingException(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
if err := awsRestjson1_deserializeOpHttpBindingsThrottlingException(output, response); err != nil {
return &smithy.DeserializationError{Err: fmt.Errorf("failed to decode response error with invalid HTTP bindings, %w", err)}
}
return output
}
func awsRestjson1_deserializeErrorValidationException(response *smithyhttp.Response, errorBody *bytes.Reader) error {
output := &types.ValidationException{}
var buff [1024]byte
ringBuffer := smithyio.NewRingBuffer(buff[:])
body := io.TeeReader(errorBody, ringBuffer)
decoder := json.NewDecoder(body)
decoder.UseNumber()
var shape interface{}
if err := decoder.Decode(&shape); err != nil && err != io.EOF {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
err := awsRestjson1_deserializeDocumentValidationException(&output, shape)
if err != nil {
var snapshot bytes.Buffer
io.Copy(&snapshot, ringBuffer)
err = &smithy.DeserializationError{
Err: fmt.Errorf("failed to decode response body, %w", err),
Snapshot: snapshot.Bytes(),
}
return err
}
errorBody.Seek(0, io.SeekStart)
return output
}
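// awsRestjson1_deserializeDocumentAccessDeniedException follows the standard
// document-deserializer pattern used throughout this file: validate the target
// pointer, treat a nil value as a no-op, require a JSON object, then copy known
// members (here just Message) into the target struct, skipping unknown keys.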
func awsRestjson1_deserializeDocumentAccessDeniedException(v **types.AccessDeniedException, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.AccessDeniedException
if *v == nil {
sv = &types.AccessDeniedException{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Message":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorMessageString to be of type string, got %T instead", value)
}
sv.Message = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentAccountHealth(v **types.AccountHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.AccountHealth
if *v == nil {
sv = &types.AccountHealth{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AccountId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AwsAccountId to be of type string, got %T instead", value)
}
sv.AccountId = ptr.String(jtv)
}
case "Insight":
if err := awsRestjson1_deserializeDocumentAccountInsightHealth(&sv.Insight, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
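// awsRestjson1_deserializeDocumentAccountHealths decodes a JSON array into
// []types.AccountHealth by delegating each element to the single-item
// deserializer; every list shape in this file uses the same pattern.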
func awsRestjson1_deserializeDocumentAccountHealths(v *[]types.AccountHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.AccountHealth
if *v == nil {
cv = []types.AccountHealth{}
} else {
cv = *v
}
for _, value := range shape {
var col types.AccountHealth
destAddr := &col
if err := awsRestjson1_deserializeDocumentAccountHealth(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentAccountInsightHealth(v **types.AccountInsightHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.AccountInsightHealth
if *v == nil {
sv = &types.AccountInsightHealth{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "OpenProactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenProactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenProactiveInsights = int32(i64)
}
case "OpenReactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenReactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenReactiveInsights = int32(i64)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
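// awsRestjson1_deserializeDocumentAnomalyReportedTimeRange decodes OpenTime and
// CloseTime, which arrive as JSON numbers of epoch seconds (decoder.UseNumber
// keeps them as json.Number) and are converted with smithytime.ParseEpochSeconds.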
func awsRestjson1_deserializeDocumentAnomalyReportedTimeRange(v **types.AnomalyReportedTimeRange, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.AnomalyReportedTimeRange
if *v == nil {
sv = &types.AnomalyReportedTimeRange{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "CloseTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.CloseTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
case "OpenTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.OpenTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentAnomalyResource(v **types.AnomalyResource, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.AnomalyResource
if *v == nil {
sv = &types.AnomalyResource{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ResourceName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "Type":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ResourceType to be of type string, got %T instead", value)
}
sv.Type = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentAnomalyResources(v *[]types.AnomalyResource, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.AnomalyResource
if *v == nil {
cv = []types.AnomalyResource{}
} else {
cv = *v
}
for _, value := range shape {
var col types.AnomalyResource
destAddr := &col
if err := awsRestjson1_deserializeDocumentAnomalyResource(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentAnomalySourceDetails(v **types.AnomalySourceDetails, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.AnomalySourceDetails
if *v == nil {
sv = &types.AnomalySourceDetails{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "CloudWatchMetrics":
if err := awsRestjson1_deserializeDocumentCloudWatchMetricsDetails(&sv.CloudWatchMetrics, value); err != nil {
return err
}
case "PerformanceInsightsMetrics":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsMetricsDetails(&sv.PerformanceInsightsMetrics, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentAnomalyTimeRange(v **types.AnomalyTimeRange, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.AnomalyTimeRange
if *v == nil {
sv = &types.AnomalyTimeRange{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "EndTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.EndTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
case "StartTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.StartTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentAssociatedResourceArns(v *[]string, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []string
if *v == nil {
cv = []string{}
} else {
cv = *v
}
for _, value := range shape {
var col string
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ResourceArn to be of type string, got %T instead", value)
}
col = jtv
}
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentChannels(v *[]types.NotificationChannel, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.NotificationChannel
if *v == nil {
cv = []types.NotificationChannel{}
} else {
cv = *v
}
for _, value := range shape {
var col types.NotificationChannel
destAddr := &col
if err := awsRestjson1_deserializeDocumentNotificationChannel(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentCloudFormationCollection(v **types.CloudFormationCollection, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.CloudFormationCollection
if *v == nil {
sv = &types.CloudFormationCollection{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "StackNames":
if err := awsRestjson1_deserializeDocumentStackNames(&sv.StackNames, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentCloudFormationCollectionFilter(v **types.CloudFormationCollectionFilter, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.CloudFormationCollectionFilter
if *v == nil {
sv = &types.CloudFormationCollectionFilter{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "StackNames":
if err := awsRestjson1_deserializeDocumentStackNames(&sv.StackNames, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentCloudFormationCostEstimationResourceCollectionFilter(v **types.CloudFormationCostEstimationResourceCollectionFilter, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.CloudFormationCostEstimationResourceCollectionFilter
if *v == nil {
sv = &types.CloudFormationCostEstimationResourceCollectionFilter{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "StackNames":
if err := awsRestjson1_deserializeDocumentCostEstimationStackNames(&sv.StackNames, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentCloudFormationHealth(v **types.CloudFormationHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.CloudFormationHealth
if *v == nil {
sv = &types.CloudFormationHealth{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Insight":
if err := awsRestjson1_deserializeDocumentInsightHealth(&sv.Insight, value); err != nil {
return err
}
case "StackName":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected StackName to be of type string, got %T instead", value)
}
sv.StackName = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentCloudFormationHealths(v *[]types.CloudFormationHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.CloudFormationHealth
if *v == nil {
cv = []types.CloudFormationHealth{}
} else {
cv = *v
}
for _, value := range shape {
var col types.CloudFormationHealth
destAddr := &col
if err := awsRestjson1_deserializeDocumentCloudFormationHealth(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
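// awsRestjson1_deserializeDocumentCloudWatchMetricsDataSummary decodes the metric
// data summary: StatusCode is a modeled enum and is converted by casting the raw
// string to types.CloudWatchMetricDataStatusCode without validation, so values
// the client does not know about are preserved as-is.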
func awsRestjson1_deserializeDocumentCloudWatchMetricsDataSummary(v **types.CloudWatchMetricsDataSummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.CloudWatchMetricsDataSummary
if *v == nil {
sv = &types.CloudWatchMetricsDataSummary{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "StatusCode":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected CloudWatchMetricDataStatusCode to be of type string, got %T instead", value)
}
sv.StatusCode = types.CloudWatchMetricDataStatusCode(jtv)
}
case "TimestampMetricValuePairList":
if err := awsRestjson1_deserializeDocumentTimestampMetricValuePairList(&sv.TimestampMetricValuePairList, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentCloudWatchMetricsDetail(v **types.CloudWatchMetricsDetail, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.CloudWatchMetricsDetail
if *v == nil {
sv = &types.CloudWatchMetricsDetail{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Dimensions":
if err := awsRestjson1_deserializeDocumentCloudWatchMetricsDimensions(&sv.Dimensions, value); err != nil {
return err
}
case "MetricDataSummary":
if err := awsRestjson1_deserializeDocumentCloudWatchMetricsDataSummary(&sv.MetricDataSummary, value); err != nil {
return err
}
case "MetricName":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected CloudWatchMetricsMetricName to be of type string, got %T instead", value)
}
sv.MetricName = ptr.String(jtv)
}
case "Namespace":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected CloudWatchMetricsNamespace to be of type string, got %T instead", value)
}
sv.Namespace = ptr.String(jtv)
}
case "Period":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected CloudWatchMetricsPeriod to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.Period = int32(i64)
}
case "Stat":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected CloudWatchMetricsStat to be of type string, got %T instead", value)
}
sv.Stat = types.CloudWatchMetricsStat(jtv)
}
case "Unit":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected CloudWatchMetricsUnit to be of type string, got %T instead", value)
}
sv.Unit = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentCloudWatchMetricsDetails(v *[]types.CloudWatchMetricsDetail, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.CloudWatchMetricsDetail
if *v == nil {
cv = []types.CloudWatchMetricsDetail{}
} else {
cv = *v
}
for _, value := range shape {
var col types.CloudWatchMetricsDetail
destAddr := &col
if err := awsRestjson1_deserializeDocumentCloudWatchMetricsDetail(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentCloudWatchMetricsDimension(v **types.CloudWatchMetricsDimension, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.CloudWatchMetricsDimension
if *v == nil {
sv = &types.CloudWatchMetricsDimension{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected CloudWatchMetricsDimensionName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "Value":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected CloudWatchMetricsDimensionValue to be of type string, got %T instead", value)
}
sv.Value = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentCloudWatchMetricsDimensions(v *[]types.CloudWatchMetricsDimension, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.CloudWatchMetricsDimension
if *v == nil {
cv = []types.CloudWatchMetricsDimension{}
} else {
cv = *v
}
for _, value := range shape {
var col types.CloudWatchMetricsDimension
destAddr := &col
if err := awsRestjson1_deserializeDocumentCloudWatchMetricsDimension(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentConflictException(v **types.ConflictException, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ConflictException
if *v == nil {
sv = &types.ConflictException{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Message":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorMessageString to be of type string, got %T instead", value)
}
sv.Message = ptr.String(jtv)
}
case "ResourceId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ResourceIdString to be of type string, got %T instead", value)
}
sv.ResourceId = ptr.String(jtv)
}
case "ResourceType":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ResourceIdType to be of type string, got %T instead", value)
}
sv.ResourceType = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentCostEstimationResourceCollectionFilter(v **types.CostEstimationResourceCollectionFilter, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.CostEstimationResourceCollectionFilter
if *v == nil {
sv = &types.CostEstimationResourceCollectionFilter{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "CloudFormation":
if err := awsRestjson1_deserializeDocumentCloudFormationCostEstimationResourceCollectionFilter(&sv.CloudFormation, value); err != nil {
return err
}
case "Tags":
if err := awsRestjson1_deserializeDocumentTagCostEstimationResourceCollectionFilters(&sv.Tags, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentCostEstimationStackNames(v *[]string, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []string
if *v == nil {
cv = []string{}
} else {
cv = *v
}
for _, value := range shape {
var col string
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected StackName to be of type string, got %T instead", value)
}
col = jtv
}
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentCostEstimationTagValues(v *[]string, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []string
if *v == nil {
cv = []string{}
} else {
cv = *v
}
for _, value := range shape {
var col string
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected TagValue to be of type string, got %T instead", value)
}
col = jtv
}
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentCostEstimationTimeRange(v **types.CostEstimationTimeRange, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.CostEstimationTimeRange
if *v == nil {
sv = &types.CostEstimationTimeRange{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "EndTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.EndTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
case "StartTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.StartTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
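// awsRestjson1_deserializeDocumentEvent decodes a single types.Event, combining
// plain string members, enum casts (DataSource, EventClass), nested documents
// (ResourceCollection, Resources), and an epoch-seconds Time value.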
func awsRestjson1_deserializeDocumentEvent(v **types.Event, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.Event
if *v == nil {
sv = &types.Event{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "DataSource":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected EventDataSource to be of type string, got %T instead", value)
}
sv.DataSource = types.EventDataSource(jtv)
}
case "EventClass":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected EventClass to be of type string, got %T instead", value)
}
sv.EventClass = types.EventClass(jtv)
}
case "EventSource":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected EventSource to be of type string, got %T instead", value)
}
sv.EventSource = ptr.String(jtv)
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected EventId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected EventName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "Resources":
if err := awsRestjson1_deserializeDocumentEventResources(&sv.Resources, value); err != nil {
return err
}
case "Time":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.Time = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentEventResource(v **types.EventResource, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.EventResource
if *v == nil {
sv = &types.EventResource{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Arn":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected EventResourceArn to be of type string, got %T instead", value)
}
sv.Arn = ptr.String(jtv)
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected EventResourceName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "Type":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected EventResourceType to be of type string, got %T instead", value)
}
sv.Type = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
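// awsRestjson1_deserializeDocumentEventResources decodes a JSON array of
// EventResource shapes, deserializing each element in place before
// appending it to the output slice.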
func awsRestjson1_deserializeDocumentEventResources(v *[]types.EventResource, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.EventResource
if *v == nil {
cv = []types.EventResource{}
} else {
cv = *v
}
for _, value := range shape {
var col types.EventResource
destAddr := &col
if err := awsRestjson1_deserializeDocumentEventResource(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentEvents(v *[]types.Event, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.Event
if *v == nil {
cv = []types.Event{}
} else {
cv = *v
}
for _, value := range shape {
var col types.Event
destAddr := &col
if err := awsRestjson1_deserializeDocumentEvent(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentInsightFeedback(v **types.InsightFeedback, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.InsightFeedback
if *v == nil {
sv = &types.InsightFeedback{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Feedback":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightFeedbackOption to be of type string, got %T instead", value)
}
sv.Feedback = types.InsightFeedbackOption(jtv)
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
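// awsRestjson1_deserializeDocumentInsightHealth decodes the insight health
// counters; integer members are delivered as json.Number and converted via
// Int64 before being narrowed to the target field types.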
func awsRestjson1_deserializeDocumentInsightHealth(v **types.InsightHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.InsightHealth
if *v == nil {
sv = &types.InsightHealth{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "MeanTimeToRecoverInMilliseconds":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected MeanTimeToRecoverInMilliseconds to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.MeanTimeToRecoverInMilliseconds = ptr.Int64(i64)
}
case "OpenProactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenProactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenProactiveInsights = int32(i64)
}
case "OpenReactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenReactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenReactiveInsights = int32(i64)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentInsightTimeRange(v **types.InsightTimeRange, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.InsightTimeRange
if *v == nil {
sv = &types.InsightTimeRange{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "EndTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.EndTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
case "StartTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.StartTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
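// awsRestjson1_deserializeDocumentInternalServerException decodes the
// modeled InternalServerException error shape, including the optional
// RetryAfterSeconds hint.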
func awsRestjson1_deserializeDocumentInternalServerException(v **types.InternalServerException, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.InternalServerException
if *v == nil {
sv = &types.InternalServerException{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Message":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorMessageString to be of type string, got %T instead", value)
}
sv.Message = ptr.String(jtv)
}
case "RetryAfterSeconds":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected RetryAfterSeconds to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.RetryAfterSeconds = int32(i64)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentNotificationChannel(v **types.NotificationChannel, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.NotificationChannel
if *v == nil {
sv = &types.NotificationChannel{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Config":
if err := awsRestjson1_deserializeDocumentNotificationChannelConfig(&sv.Config, value); err != nil {
return err
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected NotificationChannelId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentNotificationChannelConfig(v **types.NotificationChannelConfig, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.NotificationChannelConfig
if *v == nil {
sv = &types.NotificationChannelConfig{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Sns":
if err := awsRestjson1_deserializeDocumentSnsChannelConfig(&sv.Sns, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentOpsCenterIntegration(v **types.OpsCenterIntegration, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.OpsCenterIntegration
if *v == nil {
sv = &types.OpsCenterIntegration{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "OptInStatus":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected OptInStatus to be of type string, got %T instead", value)
}
sv.OptInStatus = types.OptInStatus(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsMetricDimensionGroup(v **types.PerformanceInsightsMetricDimensionGroup, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.PerformanceInsightsMetricDimensionGroup
if *v == nil {
sv = &types.PerformanceInsightsMetricDimensionGroup{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Dimensions":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsMetricDimensions(&sv.Dimensions, value); err != nil {
return err
}
case "Group":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected PerformanceInsightsMetricGroup to be of type string, got %T instead", value)
}
sv.Group = ptr.String(jtv)
}
case "Limit":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected PerformanceInsightsMetricLimitInteger to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.Limit = ptr.Int32(int32(i64))
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsMetricDimensions(v *[]string, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []string
if *v == nil {
cv = []string{}
} else {
cv = *v
}
for _, value := range shape {
var col string
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected PerformanceInsightsMetricDimension to be of type string, got %T instead", value)
}
col = jtv
}
cv = append(cv, col)
}
*v = cv
return nil
}
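// awsRestjson1_deserializeDocumentPerformanceInsightsMetricFilterMap decodes
// a JSON object into a map[string]string of Performance Insights metric
// filters, merging into any map already present at *v.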
func awsRestjson1_deserializeDocumentPerformanceInsightsMetricFilterMap(v *map[string]string, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var mv map[string]string
if *v == nil {
mv = map[string]string{}
} else {
mv = *v
}
for key, value := range shape {
var parsedVal string
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected PerformanceInsightsMetricFilterValue to be of type string, got %T instead", value)
}
parsedVal = jtv
}
mv[key] = parsedVal
}
*v = mv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsMetricQuery(v **types.PerformanceInsightsMetricQuery, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.PerformanceInsightsMetricQuery
if *v == nil {
sv = &types.PerformanceInsightsMetricQuery{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Filter":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsMetricFilterMap(&sv.Filter, value); err != nil {
return err
}
case "GroupBy":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsMetricDimensionGroup(&sv.GroupBy, value); err != nil {
return err
}
case "Metric":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected PerformanceInsightsMetricName to be of type string, got %T instead", value)
}
sv.Metric = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsMetricsDetail(v **types.PerformanceInsightsMetricsDetail, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.PerformanceInsightsMetricsDetail
if *v == nil {
sv = &types.PerformanceInsightsMetricsDetail{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "MetricDisplayName":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected PerformanceInsightsMetricDisplayName to be of type string, got %T instead", value)
}
sv.MetricDisplayName = ptr.String(jtv)
}
case "MetricQuery":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsMetricQuery(&sv.MetricQuery, value); err != nil {
return err
}
case "ReferenceData":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsReferenceDataList(&sv.ReferenceData, value); err != nil {
return err
}
case "StatsAtAnomaly":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsStats(&sv.StatsAtAnomaly, value); err != nil {
return err
}
case "StatsAtBaseline":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsStats(&sv.StatsAtBaseline, value); err != nil {
return err
}
case "Unit":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected PerformanceInsightsMetricUnit to be of type string, got %T instead", value)
}
sv.Unit = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsMetricsDetails(v *[]types.PerformanceInsightsMetricsDetail, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.PerformanceInsightsMetricsDetail
if *v == nil {
cv = []types.PerformanceInsightsMetricsDetail{}
} else {
cv = *v
}
for _, value := range shape {
var col types.PerformanceInsightsMetricsDetail
destAddr := &col
if err := awsRestjson1_deserializeDocumentPerformanceInsightsMetricsDetail(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsReferenceComparisonValues(v **types.PerformanceInsightsReferenceComparisonValues, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.PerformanceInsightsReferenceComparisonValues
if *v == nil {
sv = &types.PerformanceInsightsReferenceComparisonValues{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "ReferenceMetric":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsReferenceMetric(&sv.ReferenceMetric, value); err != nil {
return err
}
case "ReferenceScalar":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsReferenceScalar(&sv.ReferenceScalar, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsReferenceData(v **types.PerformanceInsightsReferenceData, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.PerformanceInsightsReferenceData
if *v == nil {
sv = &types.PerformanceInsightsReferenceData{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "ComparisonValues":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsReferenceComparisonValues(&sv.ComparisonValues, value); err != nil {
return err
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected PerformanceInsightsReferenceName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsReferenceDataList(v *[]types.PerformanceInsightsReferenceData, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.PerformanceInsightsReferenceData
if *v == nil {
cv = []types.PerformanceInsightsReferenceData{}
} else {
cv = *v
}
for _, value := range shape {
var col types.PerformanceInsightsReferenceData
destAddr := &col
if err := awsRestjson1_deserializeDocumentPerformanceInsightsReferenceData(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsReferenceMetric(v **types.PerformanceInsightsReferenceMetric, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.PerformanceInsightsReferenceMetric
if *v == nil {
sv = &types.PerformanceInsightsReferenceMetric{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "MetricQuery":
if err := awsRestjson1_deserializeDocumentPerformanceInsightsMetricQuery(&sv.MetricQuery, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
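// awsRestjson1_deserializeDocumentPerformanceInsightsReferenceScalar decodes
// the scalar reference value. The Value member is accepted either as a JSON
// number or as one of the special strings "NaN", "Infinity", or "-Infinity",
// which are mapped to the corresponding math package float64 values.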
func awsRestjson1_deserializeDocumentPerformanceInsightsReferenceScalar(v **types.PerformanceInsightsReferenceScalar, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.PerformanceInsightsReferenceScalar
if *v == nil {
sv = &types.PerformanceInsightsReferenceScalar{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Value":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.Value = ptr.Float64(f64)
case string:
var f64 float64
switch {
case strings.EqualFold(jtv, "NaN"):
f64 = math.NaN()
case strings.EqualFold(jtv, "Infinity"):
f64 = math.Inf(1)
case strings.EqualFold(jtv, "-Infinity"):
f64 = math.Inf(-1)
default:
return fmt.Errorf("unknown JSON number value: %s", jtv)
}
sv.Value = ptr.Float64(f64)
default:
return fmt.Errorf("expected PerformanceInsightsValueDouble to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsStat(v **types.PerformanceInsightsStat, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.PerformanceInsightsStat
if *v == nil {
sv = &types.PerformanceInsightsStat{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Type":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected PerformanceInsightsStatType to be of type string, got %T instead", value)
}
sv.Type = ptr.String(jtv)
}
case "Value":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.Value = ptr.Float64(f64)
case string:
var f64 float64
switch {
case strings.EqualFold(jtv, "NaN"):
f64 = math.NaN()
case strings.EqualFold(jtv, "Infinity"):
f64 = math.Inf(1)
case strings.EqualFold(jtv, "-Infinity"):
f64 = math.Inf(-1)
default:
return fmt.Errorf("unknown JSON number value: %s", jtv)
}
sv.Value = ptr.Float64(f64)
default:
return fmt.Errorf("expected PerformanceInsightsValueDouble to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentPerformanceInsightsStats(v *[]types.PerformanceInsightsStat, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.PerformanceInsightsStat
if *v == nil {
cv = []types.PerformanceInsightsStat{}
} else {
cv = *v
}
for _, value := range shape {
var col types.PerformanceInsightsStat
destAddr := &col
if err := awsRestjson1_deserializeDocumentPerformanceInsightsStat(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentPredictionTimeRange(v **types.PredictionTimeRange, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.PredictionTimeRange
if *v == nil {
sv = &types.PredictionTimeRange{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "EndTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.EndTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
case "StartTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.StartTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentProactiveAnomalies(v *[]types.ProactiveAnomalySummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ProactiveAnomalySummary
if *v == nil {
cv = []types.ProactiveAnomalySummary{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ProactiveAnomalySummary
destAddr := &col
if err := awsRestjson1_deserializeDocumentProactiveAnomalySummary(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
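// awsRestjson1_deserializeDocumentProactiveAnomaly decodes a ProactiveAnomaly
// shape: the time ranges, resource collection, and source details are handled
// by nested deserializers, Limit accepts a JSON number or the special float
// strings, and UpdateTime is parsed from epoch seconds.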
func awsRestjson1_deserializeDocumentProactiveAnomaly(v **types.ProactiveAnomaly, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ProactiveAnomaly
if *v == nil {
sv = &types.ProactiveAnomaly{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AnomalyReportedTimeRange":
if err := awsRestjson1_deserializeDocumentAnomalyReportedTimeRange(&sv.AnomalyReportedTimeRange, value); err != nil {
return err
}
case "AnomalyTimeRange":
if err := awsRestjson1_deserializeDocumentAnomalyTimeRange(&sv.AnomalyTimeRange, value); err != nil {
return err
}
case "AssociatedInsightId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.AssociatedInsightId = ptr.String(jtv)
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "Limit":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.Limit = ptr.Float64(f64)
case string:
var f64 float64
switch {
case strings.EqualFold(jtv, "NaN"):
f64 = math.NaN()
case strings.EqualFold(jtv, "Infinity"):
f64 = math.Inf(1)
case strings.EqualFold(jtv, "-Infinity"):
f64 = math.Inf(-1)
default:
return fmt.Errorf("unknown JSON number value: %s", jtv)
}
sv.Limit = ptr.Float64(f64)
default:
return fmt.Errorf("expected AnomalyLimit to be a JSON Number, got %T instead", value)
}
}
case "PredictionTimeRange":
if err := awsRestjson1_deserializeDocumentPredictionTimeRange(&sv.PredictionTimeRange, value); err != nil {
return err
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalySeverity to be of type string, got %T instead", value)
}
sv.Severity = types.AnomalySeverity(jtv)
}
case "SourceDetails":
if err := awsRestjson1_deserializeDocumentAnomalySourceDetails(&sv.SourceDetails, value); err != nil {
return err
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyStatus to be of type string, got %T instead", value)
}
sv.Status = types.AnomalyStatus(jtv)
}
case "UpdateTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.UpdateTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentProactiveAnomalySummary(v **types.ProactiveAnomalySummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ProactiveAnomalySummary
if *v == nil {
sv = &types.ProactiveAnomalySummary{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AnomalyReportedTimeRange":
if err := awsRestjson1_deserializeDocumentAnomalyReportedTimeRange(&sv.AnomalyReportedTimeRange, value); err != nil {
return err
}
case "AnomalyTimeRange":
if err := awsRestjson1_deserializeDocumentAnomalyTimeRange(&sv.AnomalyTimeRange, value); err != nil {
return err
}
case "AssociatedInsightId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.AssociatedInsightId = ptr.String(jtv)
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "Limit":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.Limit = ptr.Float64(f64)
case string:
var f64 float64
switch {
case strings.EqualFold(jtv, "NaN"):
f64 = math.NaN()
case strings.EqualFold(jtv, "Infinity"):
f64 = math.Inf(1)
case strings.EqualFold(jtv, "-Infinity"):
f64 = math.Inf(-1)
default:
return fmt.Errorf("unknown JSON number value: %s", jtv)
}
sv.Limit = ptr.Float64(f64)
default:
return fmt.Errorf("expected AnomalyLimit to be a JSON Number, got %T instead", value)
}
}
case "PredictionTimeRange":
if err := awsRestjson1_deserializeDocumentPredictionTimeRange(&sv.PredictionTimeRange, value); err != nil {
return err
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalySeverity to be of type string, got %T instead", value)
}
sv.Severity = types.AnomalySeverity(jtv)
}
case "SourceDetails":
if err := awsRestjson1_deserializeDocumentAnomalySourceDetails(&sv.SourceDetails, value); err != nil {
return err
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyStatus to be of type string, got %T instead", value)
}
sv.Status = types.AnomalyStatus(jtv)
}
case "UpdateTime":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.UpdateTime = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
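// awsRestjson1_deserializeDocumentProactiveInsight decodes a ProactiveInsight
// shape, mapping enum-valued members such as Severity and Status onto their
// typed string aliases.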
func awsRestjson1_deserializeDocumentProactiveInsight(v **types.ProactiveInsight, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ProactiveInsight
if *v == nil {
sv = &types.ProactiveInsight{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "InsightTimeRange":
if err := awsRestjson1_deserializeDocumentInsightTimeRange(&sv.InsightTimeRange, value); err != nil {
return err
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "PredictionTimeRange":
if err := awsRestjson1_deserializeDocumentPredictionTimeRange(&sv.PredictionTimeRange, value); err != nil {
return err
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightSeverity to be of type string, got %T instead", value)
}
sv.Severity = types.InsightSeverity(jtv)
}
case "SsmOpsItemId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected SsmOpsItemId to be of type string, got %T instead", value)
}
sv.SsmOpsItemId = ptr.String(jtv)
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightStatus to be of type string, got %T instead", value)
}
sv.Status = types.InsightStatus(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentProactiveInsights(v *[]types.ProactiveInsightSummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ProactiveInsightSummary
if *v == nil {
cv = []types.ProactiveInsightSummary{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ProactiveInsightSummary
destAddr := &col
if err := awsRestjson1_deserializeDocumentProactiveInsightSummary(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentProactiveInsightSummary(v **types.ProactiveInsightSummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ProactiveInsightSummary
if *v == nil {
sv = &types.ProactiveInsightSummary{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AssociatedResourceArns":
if err := awsRestjson1_deserializeDocumentAssociatedResourceArns(&sv.AssociatedResourceArns, value); err != nil {
return err
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "InsightTimeRange":
if err := awsRestjson1_deserializeDocumentInsightTimeRange(&sv.InsightTimeRange, value); err != nil {
return err
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "PredictionTimeRange":
if err := awsRestjson1_deserializeDocumentPredictionTimeRange(&sv.PredictionTimeRange, value); err != nil {
return err
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "ServiceCollection":
if err := awsRestjson1_deserializeDocumentServiceCollection(&sv.ServiceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightSeverity to be of type string, got %T instead", value)
}
sv.Severity = types.InsightSeverity(jtv)
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightStatus to be of type string, got %T instead", value)
}
sv.Status = types.InsightStatus(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentProactiveOrganizationInsights(v *[]types.ProactiveOrganizationInsightSummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ProactiveOrganizationInsightSummary
if *v == nil {
cv = []types.ProactiveOrganizationInsightSummary{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ProactiveOrganizationInsightSummary
destAddr := &col
if err := awsRestjson1_deserializeDocumentProactiveOrganizationInsightSummary(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentProactiveOrganizationInsightSummary(v **types.ProactiveOrganizationInsightSummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ProactiveOrganizationInsightSummary
if *v == nil {
sv = &types.ProactiveOrganizationInsightSummary{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AccountId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AwsAccountId to be of type string, got %T instead", value)
}
sv.AccountId = ptr.String(jtv)
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "InsightTimeRange":
if err := awsRestjson1_deserializeDocumentInsightTimeRange(&sv.InsightTimeRange, value); err != nil {
return err
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "OrganizationalUnitId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected OrganizationalUnitId to be of type string, got %T instead", value)
}
sv.OrganizationalUnitId = ptr.String(jtv)
}
case "PredictionTimeRange":
if err := awsRestjson1_deserializeDocumentPredictionTimeRange(&sv.PredictionTimeRange, value); err != nil {
return err
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "ServiceCollection":
if err := awsRestjson1_deserializeDocumentServiceCollection(&sv.ServiceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightSeverity to be of type string, got %T instead", value)
}
sv.Severity = types.InsightSeverity(jtv)
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightStatus to be of type string, got %T instead", value)
}
sv.Status = types.InsightStatus(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentReactiveAnomalies(v *[]types.ReactiveAnomalySummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ReactiveAnomalySummary
if *v == nil {
cv = []types.ReactiveAnomalySummary{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ReactiveAnomalySummary
destAddr := &col
if err := awsRestjson1_deserializeDocumentReactiveAnomalySummary(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
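// awsRestjson1_deserializeDocumentReactiveAnomaly decodes a ReactiveAnomaly
// shape, including its causal anomaly reference, associated insight id, and
// nested resource and source detail collections.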
func awsRestjson1_deserializeDocumentReactiveAnomaly(v **types.ReactiveAnomaly, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ReactiveAnomaly
if *v == nil {
sv = &types.ReactiveAnomaly{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AnomalyReportedTimeRange":
if err := awsRestjson1_deserializeDocumentAnomalyReportedTimeRange(&sv.AnomalyReportedTimeRange, value); err != nil {
return err
}
case "AnomalyResources":
if err := awsRestjson1_deserializeDocumentAnomalyResources(&sv.AnomalyResources, value); err != nil {
return err
}
case "AnomalyTimeRange":
if err := awsRestjson1_deserializeDocumentAnomalyTimeRange(&sv.AnomalyTimeRange, value); err != nil {
return err
}
case "AssociatedInsightId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.AssociatedInsightId = ptr.String(jtv)
}
case "CausalAnomalyId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyId to be of type string, got %T instead", value)
}
sv.CausalAnomalyId = ptr.String(jtv)
}
case "Description":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyDescription to be of type string, got %T instead", value)
}
sv.Description = ptr.String(jtv)
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalySeverity to be of type string, got %T instead", value)
}
sv.Severity = types.AnomalySeverity(jtv)
}
case "SourceDetails":
if err := awsRestjson1_deserializeDocumentAnomalySourceDetails(&sv.SourceDetails, value); err != nil {
return err
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyStatus to be of type string, got %T instead", value)
}
sv.Status = types.AnomalyStatus(jtv)
}
case "Type":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyType to be of type string, got %T instead", value)
}
sv.Type = types.AnomalyType(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentReactiveAnomalySummary(v **types.ReactiveAnomalySummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ReactiveAnomalySummary
if *v == nil {
sv = &types.ReactiveAnomalySummary{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AnomalyReportedTimeRange":
if err := awsRestjson1_deserializeDocumentAnomalyReportedTimeRange(&sv.AnomalyReportedTimeRange, value); err != nil {
return err
}
case "AnomalyResources":
if err := awsRestjson1_deserializeDocumentAnomalyResources(&sv.AnomalyResources, value); err != nil {
return err
}
case "AnomalyTimeRange":
if err := awsRestjson1_deserializeDocumentAnomalyTimeRange(&sv.AnomalyTimeRange, value); err != nil {
return err
}
case "AssociatedInsightId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.AssociatedInsightId = ptr.String(jtv)
}
case "CausalAnomalyId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyId to be of type string, got %T instead", value)
}
sv.CausalAnomalyId = ptr.String(jtv)
}
case "Description":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyDescription to be of type string, got %T instead", value)
}
sv.Description = ptr.String(jtv)
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalySeverity to be of type string, got %T instead", value)
}
sv.Severity = types.AnomalySeverity(jtv)
}
case "SourceDetails":
if err := awsRestjson1_deserializeDocumentAnomalySourceDetails(&sv.SourceDetails, value); err != nil {
return err
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyStatus to be of type string, got %T instead", value)
}
sv.Status = types.AnomalyStatus(jtv)
}
case "Type":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyType to be of type string, got %T instead", value)
}
sv.Type = types.AnomalyType(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentReactiveInsight(v **types.ReactiveInsight, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ReactiveInsight
if *v == nil {
sv = &types.ReactiveInsight{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "InsightTimeRange":
if err := awsRestjson1_deserializeDocumentInsightTimeRange(&sv.InsightTimeRange, value); err != nil {
return err
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightSeverity to be of type string, got %T instead", value)
}
sv.Severity = types.InsightSeverity(jtv)
}
case "SsmOpsItemId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected SsmOpsItemId to be of type string, got %T instead", value)
}
sv.SsmOpsItemId = ptr.String(jtv)
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightStatus to be of type string, got %T instead", value)
}
sv.Status = types.InsightStatus(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentReactiveInsights(v *[]types.ReactiveInsightSummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ReactiveInsightSummary
if *v == nil {
cv = []types.ReactiveInsightSummary{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ReactiveInsightSummary
destAddr := &col
if err := awsRestjson1_deserializeDocumentReactiveInsightSummary(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentReactiveInsightSummary(v **types.ReactiveInsightSummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ReactiveInsightSummary
if *v == nil {
sv = &types.ReactiveInsightSummary{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AssociatedResourceArns":
if err := awsRestjson1_deserializeDocumentAssociatedResourceArns(&sv.AssociatedResourceArns, value); err != nil {
return err
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "InsightTimeRange":
if err := awsRestjson1_deserializeDocumentInsightTimeRange(&sv.InsightTimeRange, value); err != nil {
return err
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "ServiceCollection":
if err := awsRestjson1_deserializeDocumentServiceCollection(&sv.ServiceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightSeverity to be of type string, got %T instead", value)
}
sv.Severity = types.InsightSeverity(jtv)
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightStatus to be of type string, got %T instead", value)
}
sv.Status = types.InsightStatus(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentReactiveOrganizationInsights(v *[]types.ReactiveOrganizationInsightSummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ReactiveOrganizationInsightSummary
if *v == nil {
cv = []types.ReactiveOrganizationInsightSummary{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ReactiveOrganizationInsightSummary
destAddr := &col
if err := awsRestjson1_deserializeDocumentReactiveOrganizationInsightSummary(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentReactiveOrganizationInsightSummary(v **types.ReactiveOrganizationInsightSummary, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ReactiveOrganizationInsightSummary
if *v == nil {
sv = &types.ReactiveOrganizationInsightSummary{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AccountId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AwsAccountId to be of type string, got %T instead", value)
}
sv.AccountId = ptr.String(jtv)
}
case "Id":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightId to be of type string, got %T instead", value)
}
sv.Id = ptr.String(jtv)
}
case "InsightTimeRange":
if err := awsRestjson1_deserializeDocumentInsightTimeRange(&sv.InsightTimeRange, value); err != nil {
return err
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "OrganizationalUnitId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected OrganizationalUnitId to be of type string, got %T instead", value)
}
sv.OrganizationalUnitId = ptr.String(jtv)
}
case "ResourceCollection":
if err := awsRestjson1_deserializeDocumentResourceCollection(&sv.ResourceCollection, value); err != nil {
return err
}
case "ServiceCollection":
if err := awsRestjson1_deserializeDocumentServiceCollection(&sv.ServiceCollection, value); err != nil {
return err
}
case "Severity":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightSeverity to be of type string, got %T instead", value)
}
sv.Severity = types.InsightSeverity(jtv)
}
case "Status":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected InsightStatus to be of type string, got %T instead", value)
}
sv.Status = types.InsightStatus(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
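// awsRestjson1_deserializeDocumentRecommendation decodes a Recommendation
// shape along with its lists of related anomalies and related events.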
func awsRestjson1_deserializeDocumentRecommendation(v **types.Recommendation, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.Recommendation
if *v == nil {
sv = &types.Recommendation{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Description":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationDescription to be of type string, got %T instead", value)
}
sv.Description = ptr.String(jtv)
}
case "Link":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationLink to be of type string, got %T instead", value)
}
sv.Link = ptr.String(jtv)
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "Reason":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationReason to be of type string, got %T instead", value)
}
sv.Reason = ptr.String(jtv)
}
case "RelatedAnomalies":
if err := awsRestjson1_deserializeDocumentRecommendationRelatedAnomalies(&sv.RelatedAnomalies, value); err != nil {
return err
}
case "RelatedEvents":
if err := awsRestjson1_deserializeDocumentRecommendationRelatedEvents(&sv.RelatedEvents, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedAnomalies(v *[]types.RecommendationRelatedAnomaly, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.RecommendationRelatedAnomaly
if *v == nil {
cv = []types.RecommendationRelatedAnomaly{}
} else {
cv = *v
}
for _, value := range shape {
var col types.RecommendationRelatedAnomaly
destAddr := &col
if err := awsRestjson1_deserializeDocumentRecommendationRelatedAnomaly(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedAnomaly(v **types.RecommendationRelatedAnomaly, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.RecommendationRelatedAnomaly
if *v == nil {
sv = &types.RecommendationRelatedAnomaly{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AnomalyId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AnomalyId to be of type string, got %T instead", value)
}
sv.AnomalyId = ptr.String(jtv)
}
case "Resources":
if err := awsRestjson1_deserializeDocumentRecommendationRelatedAnomalyResources(&sv.Resources, value); err != nil {
return err
}
case "SourceDetails":
if err := awsRestjson1_deserializeDocumentRelatedAnomalySourceDetails(&sv.SourceDetails, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedAnomalyResource(v **types.RecommendationRelatedAnomalyResource, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.RecommendationRelatedAnomalyResource
if *v == nil {
sv = &types.RecommendationRelatedAnomalyResource{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationRelatedAnomalyResourceName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "Type":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationRelatedAnomalyResourceType to be of type string, got %T instead", value)
}
sv.Type = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedAnomalyResources(v *[]types.RecommendationRelatedAnomalyResource, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.RecommendationRelatedAnomalyResource
if *v == nil {
cv = []types.RecommendationRelatedAnomalyResource{}
} else {
cv = *v
}
for _, value := range shape {
var col types.RecommendationRelatedAnomalyResource
destAddr := &col
if err := awsRestjson1_deserializeDocumentRecommendationRelatedAnomalyResource(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedAnomalySourceDetail(v **types.RecommendationRelatedAnomalySourceDetail, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.RecommendationRelatedAnomalySourceDetail
if *v == nil {
sv = &types.RecommendationRelatedAnomalySourceDetail{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "CloudWatchMetrics":
if err := awsRestjson1_deserializeDocumentRecommendationRelatedCloudWatchMetricsSourceDetails(&sv.CloudWatchMetrics, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedCloudWatchMetricsSourceDetail(v **types.RecommendationRelatedCloudWatchMetricsSourceDetail, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.RecommendationRelatedCloudWatchMetricsSourceDetail
if *v == nil {
sv = &types.RecommendationRelatedCloudWatchMetricsSourceDetail{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "MetricName":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationRelatedCloudWatchMetricsSourceMetricName to be of type string, got %T instead", value)
}
sv.MetricName = ptr.String(jtv)
}
case "Namespace":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationRelatedCloudWatchMetricsSourceNamespace to be of type string, got %T instead", value)
}
sv.Namespace = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedCloudWatchMetricsSourceDetails(v *[]types.RecommendationRelatedCloudWatchMetricsSourceDetail, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.RecommendationRelatedCloudWatchMetricsSourceDetail
if *v == nil {
cv = []types.RecommendationRelatedCloudWatchMetricsSourceDetail{}
} else {
cv = *v
}
for _, value := range shape {
var col types.RecommendationRelatedCloudWatchMetricsSourceDetail
destAddr := &col
if err := awsRestjson1_deserializeDocumentRecommendationRelatedCloudWatchMetricsSourceDetail(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedEvent(v **types.RecommendationRelatedEvent, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.RecommendationRelatedEvent
if *v == nil {
sv = &types.RecommendationRelatedEvent{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationRelatedEventName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "Resources":
if err := awsRestjson1_deserializeDocumentRecommendationRelatedEventResources(&sv.Resources, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedEventResource(v **types.RecommendationRelatedEventResource, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.RecommendationRelatedEventResource
if *v == nil {
sv = &types.RecommendationRelatedEventResource{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationRelatedEventResourceName to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
case "Type":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected RecommendationRelatedEventResourceType to be of type string, got %T instead", value)
}
sv.Type = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedEventResources(v *[]types.RecommendationRelatedEventResource, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.RecommendationRelatedEventResource
if *v == nil {
cv = []types.RecommendationRelatedEventResource{}
} else {
cv = *v
}
for _, value := range shape {
var col types.RecommendationRelatedEventResource
destAddr := &col
if err := awsRestjson1_deserializeDocumentRecommendationRelatedEventResource(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentRecommendationRelatedEvents(v *[]types.RecommendationRelatedEvent, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.RecommendationRelatedEvent
if *v == nil {
cv = []types.RecommendationRelatedEvent{}
} else {
cv = *v
}
for _, value := range shape {
var col types.RecommendationRelatedEvent
destAddr := &col
if err := awsRestjson1_deserializeDocumentRecommendationRelatedEvent(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentRecommendations(v *[]types.Recommendation, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.Recommendation
if *v == nil {
cv = []types.Recommendation{}
} else {
cv = *v
}
for _, value := range shape {
var col types.Recommendation
destAddr := &col
if err := awsRestjson1_deserializeDocumentRecommendation(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentRelatedAnomalySourceDetails(v *[]types.RecommendationRelatedAnomalySourceDetail, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.RecommendationRelatedAnomalySourceDetail
if *v == nil {
cv = []types.RecommendationRelatedAnomalySourceDetail{}
} else {
cv = *v
}
for _, value := range shape {
var col types.RecommendationRelatedAnomalySourceDetail
destAddr := &col
if err := awsRestjson1_deserializeDocumentRecommendationRelatedAnomalySourceDetail(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentResourceCollection(v **types.ResourceCollection, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ResourceCollection
if *v == nil {
sv = &types.ResourceCollection{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "CloudFormation":
if err := awsRestjson1_deserializeDocumentCloudFormationCollection(&sv.CloudFormation, value); err != nil {
return err
}
case "Tags":
if err := awsRestjson1_deserializeDocumentTagCollections(&sv.Tags, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentResourceCollectionFilter(v **types.ResourceCollectionFilter, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ResourceCollectionFilter
if *v == nil {
sv = &types.ResourceCollectionFilter{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "CloudFormation":
if err := awsRestjson1_deserializeDocumentCloudFormationCollectionFilter(&sv.CloudFormation, value); err != nil {
return err
}
case "Tags":
if err := awsRestjson1_deserializeDocumentTagCollectionFilters(&sv.Tags, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentResourceNotFoundException(v **types.ResourceNotFoundException, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ResourceNotFoundException
if *v == nil {
sv = &types.ResourceNotFoundException{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Message":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorMessageString to be of type string, got %T instead", value)
}
sv.Message = ptr.String(jtv)
}
case "ResourceId":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ResourceIdString to be of type string, got %T instead", value)
}
sv.ResourceId = ptr.String(jtv)
}
case "ResourceType":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ResourceIdType to be of type string, got %T instead", value)
}
sv.ResourceType = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentServiceCollection(v **types.ServiceCollection, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ServiceCollection
if *v == nil {
sv = &types.ServiceCollection{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "ServiceNames":
if err := awsRestjson1_deserializeDocumentServiceNames(&sv.ServiceNames, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentServiceHealth(v **types.ServiceHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ServiceHealth
if *v == nil {
sv = &types.ServiceHealth{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Insight":
if err := awsRestjson1_deserializeDocumentServiceInsightHealth(&sv.Insight, value); err != nil {
return err
}
case "ServiceName":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ServiceName to be of type string, got %T instead", value)
}
sv.ServiceName = types.ServiceName(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentServiceHealths(v *[]types.ServiceHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ServiceHealth
if *v == nil {
cv = []types.ServiceHealth{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ServiceHealth
destAddr := &col
if err := awsRestjson1_deserializeDocumentServiceHealth(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentServiceInsightHealth(v **types.ServiceInsightHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ServiceInsightHealth
if *v == nil {
sv = &types.ServiceInsightHealth{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "OpenProactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenProactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenProactiveInsights = int32(i64)
}
case "OpenReactiveInsights":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected NumOpenReactiveInsights to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.OpenReactiveInsights = int32(i64)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentServiceIntegrationConfig(v **types.ServiceIntegrationConfig, value interface{}) error {
|
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ServiceIntegrationConfig
if *v == nil {
sv = &types.ServiceIntegrationConfig{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "OpsCenter":
if err := awsRestjson1_deserializeDocumentOpsCenterIntegration(&sv.OpsCenter, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentServiceNames(v *[]types.ServiceName, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ServiceName
if *v == nil {
cv = []types.ServiceName{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ServiceName
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ServiceName to be of type string, got %T instead", value)
}
col = types.ServiceName(jtv)
}
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentServiceQuotaExceededException(v **types.ServiceQuotaExceededException, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ServiceQuotaExceededException
if *v == nil {
sv = &types.ServiceQuotaExceededException{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Message":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorMessageString to be of type string, got %T instead", value)
}
sv.Message = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentServiceResourceCost(v **types.ServiceResourceCost, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ServiceResourceCost
if *v == nil {
sv = &types.ServiceResourceCost{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Cost":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.Cost = f64
case string:
var f64 float64
switch {
case strings.EqualFold(jtv, "NaN"):
f64 = math.NaN()
case strings.EqualFold(jtv, "Infinity"):
f64 = math.Inf(1)
case strings.EqualFold(jtv, "-Infinity"):
f64 = math.Inf(-1)
default:
return fmt.Errorf("unknown JSON number value: %s", jtv)
}
sv.Cost = f64
default:
return fmt.Errorf("expected Cost to be a JSON Number, got %T instead", value)
}
}
case "Count":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected CostEstimationServiceResourceCount to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.Count = int32(i64)
}
case "State":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected CostEstimationServiceResourceState to be of type string, got %T instead", value)
}
sv.State = types.CostEstimationServiceResourceState(jtv)
}
case "Type":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ResourceType to be of type string, got %T instead", value)
}
sv.Type = ptr.String(jtv)
}
case "UnitCost":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.UnitCost = f64
case string:
var f64 float64
switch {
case strings.EqualFold(jtv, "NaN"):
f64 = math.NaN()
case strings.EqualFold(jtv, "Infinity"):
f64 = math.Inf(1)
case strings.EqualFold(jtv, "-Infinity"):
f64 = math.Inf(-1)
default:
return fmt.Errorf("unknown JSON number value: %s", jtv)
}
sv.UnitCost = f64
default:
return fmt.Errorf("expected Cost to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentServiceResourceCosts(v *[]types.ServiceResourceCost, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ServiceResourceCost
if *v == nil {
cv = []types.ServiceResourceCost{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ServiceResourceCost
destAddr := &col
if err := awsRestjson1_deserializeDocumentServiceResourceCost(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentSnsChannelConfig(v **types.SnsChannelConfig, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.SnsChannelConfig
if *v == nil {
sv = &types.SnsChannelConfig{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "TopicArn":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected TopicArn to be of type string, got %T instead", value)
}
sv.TopicArn = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentStackNames(v *[]string, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []string
if *v == nil {
cv = []string{}
} else {
cv = *v
}
for _, value := range shape {
var col string
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected StackName to be of type string, got %T instead", value)
}
col = jtv
}
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentTagCollection(v **types.TagCollection, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.TagCollection
if *v == nil {
sv = &types.TagCollection{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AppBoundaryKey":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AppBoundaryKey to be of type string, got %T instead", value)
}
sv.AppBoundaryKey = ptr.String(jtv)
}
case "TagValues":
if err := awsRestjson1_deserializeDocumentTagValues(&sv.TagValues, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentTagCollectionFilter(v **types.TagCollectionFilter, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.TagCollectionFilter
if *v == nil {
sv = &types.TagCollectionFilter{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AppBoundaryKey":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AppBoundaryKey to be of type string, got %T instead", value)
}
sv.AppBoundaryKey = ptr.String(jtv)
}
case "TagValues":
if err := awsRestjson1_deserializeDocumentTagValues(&sv.TagValues, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentTagCollectionFilters(v *[]types.TagCollectionFilter, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.TagCollectionFilter
if *v == nil {
cv = []types.TagCollectionFilter{}
} else {
cv = *v
}
for _, value := range shape {
var col types.TagCollectionFilter
destAddr := &col
if err := awsRestjson1_deserializeDocumentTagCollectionFilter(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentTagCollections(v *[]types.TagCollection, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.TagCollection
if *v == nil {
cv = []types.TagCollection{}
} else {
cv = *v
}
for _, value := range shape {
var col types.TagCollection
destAddr := &col
if err := awsRestjson1_deserializeDocumentTagCollection(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentTagCostEstimationResourceCollectionFilter(v **types.TagCostEstimationResourceCollectionFilter, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.TagCostEstimationResourceCollectionFilter
if *v == nil {
sv = &types.TagCostEstimationResourceCollectionFilter{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AppBoundaryKey":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AppBoundaryKey to be of type string, got %T instead", value)
}
sv.AppBoundaryKey = ptr.String(jtv)
}
case "TagValues":
if err := awsRestjson1_deserializeDocumentCostEstimationTagValues(&sv.TagValues, value); err != nil {
return err
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentTagCostEstimationResourceCollectionFilters(v *[]types.TagCostEstimationResourceCollectionFilter, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.TagCostEstimationResourceCollectionFilter
if *v == nil {
cv = []types.TagCostEstimationResourceCollectionFilter{}
} else {
cv = *v
}
for _, value := range shape {
var col types.TagCostEstimationResourceCollectionFilter
destAddr := &col
if err := awsRestjson1_deserializeDocumentTagCostEstimationResourceCollectionFilter(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentTagHealth(v **types.TagHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.TagHealth
if *v == nil {
sv = &types.TagHealth{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "AppBoundaryKey":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected AppBoundaryKey to be of type string, got %T instead", value)
}
sv.AppBoundaryKey = ptr.String(jtv)
}
case "Insight":
if err := awsRestjson1_deserializeDocumentInsightHealth(&sv.Insight, value); err != nil {
return err
}
case "TagValue":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected TagValue to be of type string, got %T instead", value)
}
sv.TagValue = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentTagHealths(v *[]types.TagHealth, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.TagHealth
if *v == nil {
cv = []types.TagHealth{}
} else {
cv = *v
}
for _, value := range shape {
var col types.TagHealth
destAddr := &col
if err := awsRestjson1_deserializeDocumentTagHealth(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentTagValues(v *[]string, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []string
if *v == nil {
cv = []string{}
} else {
cv = *v
}
for _, value := range shape {
var col string
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected TagValue to be of type string, got %T instead", value)
}
col = jtv
}
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentThrottlingException(v **types.ThrottlingException, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ThrottlingException
if *v == nil {
sv = &types.ThrottlingException{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Message":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorMessageString to be of type string, got %T instead", value)
}
sv.Message = ptr.String(jtv)
}
case "QuotaCode":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorQuotaCodeString to be of type string, got %T instead", value)
}
sv.QuotaCode = ptr.String(jtv)
}
case "RetryAfterSeconds":
if value != nil {
jtv, ok := value.(json.Number)
if !ok {
return fmt.Errorf("expected RetryAfterSeconds to be json.Number, got %T instead", value)
}
i64, err := jtv.Int64()
if err != nil {
return err
}
sv.RetryAfterSeconds = int32(i64)
}
case "ServiceCode":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorServiceCodeString to be of type string, got %T instead", value)
}
sv.ServiceCode = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentTimestampMetricValuePair(v **types.TimestampMetricValuePair, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.TimestampMetricValuePair
if *v == nil {
sv = &types.TimestampMetricValuePair{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "MetricValue":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.MetricValue = ptr.Float64(f64)
case string:
var f64 float64
switch {
case strings.EqualFold(jtv, "NaN"):
f64 = math.NaN()
case strings.EqualFold(jtv, "Infinity"):
f64 = math.Inf(1)
case strings.EqualFold(jtv, "-Infinity"):
f64 = math.Inf(-1)
default:
return fmt.Errorf("unknown JSON number value: %s", jtv)
}
sv.MetricValue = ptr.Float64(f64)
default:
return fmt.Errorf("expected MetricValue to be a JSON Number, got %T instead", value)
}
}
case "Timestamp":
if value != nil {
switch jtv := value.(type) {
case json.Number:
f64, err := jtv.Float64()
if err != nil {
return err
}
sv.Timestamp = ptr.Time(smithytime.ParseEpochSeconds(f64))
default:
return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value)
}
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentTimestampMetricValuePairList(v *[]types.TimestampMetricValuePair, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.TimestampMetricValuePair
if *v == nil {
cv = []types.TimestampMetricValuePair{}
} else {
cv = *v
}
for _, value := range shape {
var col types.TimestampMetricValuePair
destAddr := &col
if err := awsRestjson1_deserializeDocumentTimestampMetricValuePair(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
func awsRestjson1_deserializeDocumentValidationException(v **types.ValidationException, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ValidationException
if *v == nil {
sv = &types.ValidationException{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Fields":
if err := awsRestjson1_deserializeDocumentValidationExceptionFields(&sv.Fields, value); err != nil {
return err
}
case "Message":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorMessageString to be of type string, got %T instead", value)
}
sv.Message = ptr.String(jtv)
}
case "Reason":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ValidationExceptionReason to be of type string, got %T instead", value)
}
sv.Reason = types.ValidationExceptionReason(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentValidationExceptionField(v **types.ValidationExceptionField, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.(map[string]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var sv *types.ValidationExceptionField
if *v == nil {
sv = &types.ValidationExceptionField{}
} else {
sv = *v
}
for key, value := range shape {
switch key {
case "Message":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorMessageString to be of type string, got %T instead", value)
}
sv.Message = ptr.String(jtv)
}
case "Name":
if value != nil {
jtv, ok := value.(string)
if !ok {
return fmt.Errorf("expected ErrorNameString to be of type string, got %T instead", value)
}
sv.Name = ptr.String(jtv)
}
default:
_, _ = key, value
}
}
*v = sv
return nil
}
func awsRestjson1_deserializeDocumentValidationExceptionFields(v *[]types.ValidationExceptionField, value interface{}) error {
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
return nil
}
shape, ok := value.([]interface{})
if !ok {
return fmt.Errorf("unexpected JSON type %v", value)
}
var cv []types.ValidationExceptionField
if *v == nil {
cv = []types.ValidationExceptionField{}
} else {
cv = *v
}
for _, value := range shape {
var col types.ValidationExceptionField
destAddr := &col
if err := awsRestjson1_deserializeDocumentValidationExceptionField(&destAddr, value); err != nil {
return err
}
col = *destAddr
cv = append(cv, col)
}
*v = cv
return nil
}
|
if v == nil {
return fmt.Errorf("unexpected nil of type %T", v)
}
if value == nil {
|
dummy.py
|
"Dummy cache backend"
from django.core.cache.backends.base import BaseCache
class CacheClass(BaseCache):
def __init__(self, *args, **kwargs):
pass
def add(self, key, *args, **kwargs):
self.validate_key(key)
return True
def get(self, key, default=None):
self.validate_key(key)
return default
def set(self, key, *args, **kwargs):
self.validate_key(key)
def delete(self, key, *args, **kwargs):
self.validate_key(key)
def get_many(self, *args, **kwargs):
return {}
def has_key(self, key, *args, **kwargs):
self.validate_key(key)
return False
def set_many(self, *args, **kwargs):
pass
def delete_many(self, *args, **kwargs):
|
def clear(self):
pass
|
pass
|
track.go
|
// Copyright 2015 Keybase, Inc. All rights reserved. Use of
// this source code is governed by the included BSD license.
package libkb
import (
"errors"
"fmt"
"time"
keybase1 "github.com/keybase/client/go/protocol/keybase1"
jsonw "github.com/keybase/go-jsonw"
)
var ErrTrackingExpired = errors.New("Local track expired")
// Can be a ProofLinkWithState, one of the identities listed in a
// tracking statement, or a PGP Fingerprint!
type TrackIDComponent interface {
ToIDString() string
ToKeyValuePair() (string, string)
GetProofState() keybase1.ProofState
LastWriterWins() bool
GetProofType() keybase1.ProofType
}
type TrackSet struct {
ids map[string]TrackIDComponent
services map[string]bool
}
func NewTrackSet() *TrackSet {
return &TrackSet{
ids: make(map[string]TrackIDComponent),
services: make(map[string]bool),
}
}
func (ts TrackSet) Add(t TrackIDComponent) {
ts.ids[t.ToIDString()] = t
if t.LastWriterWins() {
k, _ := t.ToKeyValuePair()
ts.services[k] = true
}
}
func (ts TrackSet) GetProofState(id string) keybase1.ProofState {
ret := keybase1.ProofState_NONE
if obj := ts.ids[id]; obj != nil {
ret = obj.GetProofState()
}
return ret
}
func (ts TrackSet) Subtract(b TrackSet) (out []TrackIDComponent) {
for _, c := range ts.ids {
if !b.HasMember(c) {
out = append(out, c)
}
}
return
}
func (ts TrackSet) HasMember(t TrackIDComponent) bool {
var found bool
	// For LastWriterWins services like social networks, it just matters
	// that there is some proof for the service. For non-last-writer-wins
	// services, like HTTPS and DNS, the full proof needs to show up in the set.
if t.LastWriterWins() {
k, _ := t.ToKeyValuePair()
_, found = ts.services[k]
} else {
_, found = ts.ids[t.ToIDString()]
}
return found
}
func (ts TrackSet) LenEq(b TrackSet) bool {
return len(ts.ids) == len(b.ids)
}
//=====================================================================
type TrackInstructions struct {
Local bool
Remote bool
}
//=====================================================================
type TrackSummary struct {
time time.Time
isRemote bool
username string
}
func (s TrackSummary) IsRemote() bool { return s.isRemote }
func (s TrackSummary) GetCTime() time.Time { return s.time }
func (s TrackSummary) Username() string { return s.username }
//=====================================================================
type TrackLookup struct {
Contextified
link *TrackChainLink // The original chain link that I signed
set *TrackSet // The total set of tracked identities
	ids map[string][]string // An http -> [foo.com, boo.com] lookup
	trackerSeqno keybase1.Seqno // The seqno in the tracker's sigchain
}
func (l TrackLookup) ToSummary() TrackSummary {
ret := TrackSummary{
time: l.GetCTime(),
isRemote: l.IsRemote(),
}
return ret
}
func (l TrackLookup) GetProofState(id string) keybase1.ProofState {
return l.set.GetProofState(id)
}
func (l TrackLookup) GetTrackerSeqno() keybase1.Seqno {
return l.trackerSeqno
}
|
ret, err := l.link.GetTrackedKeys()
if err != nil {
l.G().Log.Warning("Error in lookup of tracked PGP fingerprints: %s", err)
}
return ret
}
func (l TrackLookup) GetEldestKID() keybase1.KID {
ret, err := l.link.GetEldestKID()
if err != nil {
l.G().Log.Warning("Error in lookup of eldest KID: %s", err)
}
return ret
}
func (l TrackLookup) GetTmpExpireTime() (ret time.Time) {
return l.link.GetTmpExpireTime()
}
func (l TrackLookup) IsRemote() bool {
return l.link.IsRemote()
}
type TrackDiff interface {
BreaksTracking() bool
ToDisplayString() string
ToDisplayMarkup() *Markup
IsSameAsTracked() bool
GetTrackDiffType() keybase1.TrackDiffType
}
type TrackDiffUpgraded struct {
prev, curr string
}
func (t TrackDiffUpgraded) IsSameAsTracked() bool {
return false
}
func (t TrackDiffUpgraded) BreaksTracking() bool {
return false
}
func (t TrackDiffUpgraded) ToDisplayString() string {
return "Upgraded from " + t.prev + " to " + t.curr
}
func (t TrackDiffUpgraded) GetPrev() string { return t.prev }
func (t TrackDiffUpgraded) GetCurr() string { return t.curr }
func (t TrackDiffUpgraded) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func (t TrackDiffUpgraded) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_UPGRADED
}
type TrackDiffNone struct{}
func (t TrackDiffNone) BreaksTracking() bool {
return false
}
func (t TrackDiffNone) IsSameAsTracked() bool {
return true
}
func (t TrackDiffNone) ToDisplayString() string {
return "tracked"
}
func (t TrackDiffNone) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func (t TrackDiffNone) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_NONE
}
type TrackDiffNoneViaTemporary struct{}
func (t TrackDiffNoneViaTemporary) BreaksTracking() bool { return false }
func (t TrackDiffNoneViaTemporary) IsSameAsTracked() bool { return true }
func (t TrackDiffNoneViaTemporary) ToDisplayString() string { return "snoozed" }
func (t TrackDiffNoneViaTemporary) ToDisplayMarkup() *Markup { return NewMarkup(t.ToDisplayString()) }
func (t TrackDiffNoneViaTemporary) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_NONE_VIA_TEMPORARY
}
type TrackDiffNew struct{}
func (t TrackDiffNew) BreaksTracking() bool {
return false
}
func (t TrackDiffNew) IsSameAsTracked() bool {
return false
}
type TrackDiffClash struct {
observed, expected string
}
func (t TrackDiffNew) ToDisplayString() string {
return "new"
}
func (t TrackDiffNew) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func (t TrackDiffNew) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_NEW
}
func (t TrackDiffClash) BreaksTracking() bool {
return true
}
func (t TrackDiffClash) ToDisplayString() string {
return "CHANGED from \"" + t.expected + "\""
}
func (t TrackDiffClash) IsSameAsTracked() bool {
return false
}
func (t TrackDiffClash) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func (t TrackDiffClash) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_CLASH
}
type TrackDiffRevoked struct {
idc TrackIDComponent
}
func (t TrackDiffRevoked) BreaksTracking() bool {
return true
}
func (t TrackDiffRevoked) ToDisplayString() string {
return "Deleted proof: " + t.idc.ToIDString()
}
func (t TrackDiffRevoked) IsSameAsTracked() bool {
return false
}
func (t TrackDiffRevoked) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func (t TrackDiffRevoked) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_REVOKED
}
type TrackDiffSnoozedRevoked struct {
idc TrackIDComponent
}
func (t TrackDiffSnoozedRevoked) BreaksTracking() bool {
return false
}
func (t TrackDiffSnoozedRevoked) ToDisplayString() string {
return "Deleted proof: " + t.idc.ToIDString() + " (snoozed)"
}
func (t TrackDiffSnoozedRevoked) IsSameAsTracked() bool {
return true
}
func (t TrackDiffSnoozedRevoked) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func (t TrackDiffSnoozedRevoked) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_NONE_VIA_TEMPORARY
}
type TrackDiffRemoteFail struct {
observed keybase1.ProofState
}
func (t TrackDiffRemoteFail) BreaksTracking() bool {
return true
}
func (t TrackDiffRemoteFail) ToDisplayString() string {
return "remote failed"
}
func (t TrackDiffRemoteFail) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func (t TrackDiffRemoteFail) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_REMOTE_FAIL
}
func (t TrackDiffRemoteFail) IsSameAsTracked() bool {
return false
}
type TrackDiffRemoteWorking struct {
tracked keybase1.ProofState
}
func (t TrackDiffRemoteWorking) BreaksTracking() bool {
return false
}
func (t TrackDiffRemoteWorking) ToDisplayString() string {
return "newly working"
}
func (t TrackDiffRemoteWorking) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func (t TrackDiffRemoteWorking) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_REMOTE_WORKING
}
func (t TrackDiffRemoteWorking) IsSameAsTracked() bool {
return false
}
type TrackDiffRemoteChanged struct {
tracked, observed keybase1.ProofState
}
func (t TrackDiffRemoteChanged) BreaksTracking() bool {
return false
}
func (t TrackDiffRemoteChanged) ToDisplayString() string {
return "changed"
}
func (t TrackDiffRemoteChanged) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func (t TrackDiffRemoteChanged) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_REMOTE_CHANGED
}
func (t TrackDiffRemoteChanged) IsSameAsTracked() bool {
return false
}
type TrackDiffNewEldest struct {
tracked keybase1.KID
observed keybase1.KID
}
func (t TrackDiffNewEldest) BreaksTracking() bool {
return true
}
func (t TrackDiffNewEldest) IsSameAsTracked() bool {
return false
}
func (t TrackDiffNewEldest) GetTrackDiffType() keybase1.TrackDiffType {
return keybase1.TrackDiffType_NEW_ELDEST
}
func (t TrackDiffNewEldest) ToDisplayString() string {
if t.tracked.IsNil() {
return fmt.Sprintf("No key when followed; established new eldest key %s", t.observed)
}
return fmt.Sprintf("Account reset! Old key was %s; new key is %s", t.tracked, t.observed)
}
func (t TrackDiffNewEldest) ToDisplayMarkup() *Markup {
return NewMarkup(t.ToDisplayString())
}
func NewTrackLookup(g *GlobalContext, link *TrackChainLink) *TrackLookup {
sbs := link.ToServiceBlocks()
set := NewTrackSet()
ids := make(map[string][]string)
for _, sb := range sbs {
set.Add(sb)
k, v := sb.ToKeyValuePair()
ids[k] = append(ids[k], v)
}
ret := &TrackLookup{Contextified: NewContextified(g), link: link, set: set, ids: ids, trackerSeqno: link.GetSeqno()}
return ret
}
func (l *TrackLookup) GetCTime() time.Time {
return l.link.GetCTime()
}
//=====================================================================
func LocalTrackDBKey(tracker, trackee keybase1.UID, expireLocal bool) DbKey {
key := fmt.Sprintf("%s-%s", tracker, trackee)
if expireLocal {
key += "-expires"
}
return DbKey{Typ: DBLocalTrack, Key: key}
}
//=====================================================================
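// localTrackChainLinkFor loads a locally stored tracking statement for the
// (tracker, trackee) pair from the local DB. When localExpires is true,
// entries older than the configured local track max age are deleted and
// ErrTrackingExpired is returned.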
func localTrackChainLinkFor(m MetaContext, tracker, trackee keybase1.UID, localExpires bool) (ret *TrackChainLink, err error) {
data, _, err := m.G().LocalDb.GetRaw(LocalTrackDBKey(tracker, trackee, localExpires))
if err != nil {
m.CDebugf("| DB lookup failed")
return nil, err
}
if data == nil || len(data) == 0 {
m.CDebugf("| No local track found")
return nil, nil
}
cl := &ChainLink{Contextified: NewContextified(m.G()), unsigned: true}
if err = cl.UnpackLocal(data); err != nil {
m.CDebugf("| unpack local failed -> %s", err)
return nil, err
}
var linkETime time.Time
if localExpires {
linkETime = cl.GetCTime().Add(m.G().Env.GetLocalTrackMaxAge())
m.CDebugf("| local track created %s, expires: %s, it is now %s", cl.GetCTime(), linkETime.String(), m.G().Clock().Now())
if linkETime.Before(m.G().Clock().Now()) {
m.CDebugf("| expired local track, deleting")
removeLocalTrack(m, tracker, trackee, true)
return nil, ErrTrackingExpired
}
}
base := GenericChainLink{cl}
ret, err = ParseTrackChainLink(base)
if ret != nil && err == nil {
ret.local = true
ret.tmpExpireTime = linkETime
}
return ret, err
}
func LocalTrackChainLinkFor(m MetaContext, tracker, trackee keybase1.UID) (ret *TrackChainLink, err error) {
return localTrackChainLinkFor(m, tracker, trackee, false)
}
func LocalTmpTrackChainLinkFor(m MetaContext, tracker, trackee keybase1.UID) (ret *TrackChainLink, err error) {
return localTrackChainLinkFor(m, tracker, trackee, true)
}
func StoreLocalTrack(m MetaContext, tracker keybase1.UID, trackee keybase1.UID, expiringLocal bool, statement *jsonw.Wrapper) error {
m.CDebugf("| StoreLocalTrack, expiring = %v", expiringLocal)
err := m.G().LocalDb.Put(LocalTrackDBKey(tracker, trackee, expiringLocal), nil, statement)
if err == nil {
m.G().IdentifyDispatch.NotifyTrackingSuccess(m, trackee)
}
return err
}
func removeLocalTrack(m MetaContext, tracker keybase1.UID, trackee keybase1.UID, expiringLocal bool) error {
m.CDebugf("| RemoveLocalTrack, expiring = %v", expiringLocal)
return m.G().LocalDb.Delete(LocalTrackDBKey(tracker, trackee, expiringLocal))
}
func RemoveLocalTracks(m MetaContext, tracker keybase1.UID, trackee keybase1.UID) error {
e1 := removeLocalTrack(m, tracker, trackee, false)
e2 := removeLocalTrack(m, tracker, trackee, true)
return PickFirstError(e1, e2)
}
|
func (l TrackLookup) GetTrackedKeys() []TrackedKey {
|
metadata.go
|
package crud
import (
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"path"
"github.com/ViBiOh/fibr/pkg/provider"
)
var (
metadataFilename = path.Join(provider.MetadataDirectoryName, ".json")
)
func (a *app) loadMetadata() error {
_, err := a.storage.Info(metadataFilename)
if err != nil && !provider.IsNotExist(err) {
return err
}
if provider.IsNotExist(err) {
if err := a.storage.CreateDir(provider.MetadataDirectoryName); err != nil {
return err
}
a.metadatas = make([]*provider.Share, 0)
return nil
}
file, err := a.storage.ReaderFrom(metadataFilename)
if file != nil {
defer func() {
if closeErr := file.Close(); closeErr != nil {
err = fmt.Errorf("%s: %w", err, closeErr)
}
}()
}
if err != nil {
return err
}
|
rawMeta, err := ioutil.ReadAll(file)
if err != nil {
return err
}
if err = json.Unmarshal(rawMeta, &a.metadatas); err != nil {
return err
}
return nil
}
func (a *app) saveMetadata() (err error) {
if !a.metadataEnabled {
return errors.New("metadata not enabled")
}
content, err := json.MarshalIndent(&a.metadatas, "", " ")
if err != nil {
return err
}
file, err := a.storage.WriterTo(metadataFilename)
if file != nil {
defer func() {
if closeErr := file.Close(); closeErr != nil {
err = fmt.Errorf("%s: %w", err, closeErr)
}
}()
}
if err != nil {
return err
}
n, err := file.Write(content)
if err != nil {
return err
}
if n < len(content) {
return io.ErrShortWrite
}
return nil
}
| |
stream.rs
|
// Copyright 2018 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt;
use std::sync::Arc;
use crc::crc32::{self, Hasher32};
use uuid::Uuid;
use kvproto::import_sstpb::*;
use kvproto::metapb::*;
use rocksdb::{DBIterator, SeekKey, DB};
use super::client::*;
use super::common::*;
use super::engine::*;
use super::{Config, Result};
pub struct SSTFile {
pub meta: SSTMeta,
pub data: Vec<u8>,
}
impl SSTFile {
pub fn inside_region(&self, region: &Region) -> bool {
let range = self.meta.get_range();
assert!(range.get_start() <= range.get_end());
inside_region(range.get_start(), region) && inside_region(range.get_end(), region)
}
}
impl fmt::Debug for SSTFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let uuid = Uuid::from_bytes(self.meta.get_uuid()).unwrap();
f.debug_struct("SSTFile")
.field("uuid", &uuid)
.field("range", self.meta.get_range())
.field("length", &self.meta.get_length())
.field("cf_name", &self.meta.get_cf_name().to_owned())
.finish()
}
}
pub type SSTRange = (Range, Vec<SSTFile>);
pub struct SSTFileStream<Client> {
ctx: RangeContext<Client>,
iter: RangeIterator,
engine: Arc<Engine>,
stream_range: Range,
}
impl<Client: ImportClient> SSTFileStream<Client> {
pub fn new(
cfg: Config,
client: Arc<Client>,
engine: Arc<Engine>,
stream_range: Range,
finished_ranges: Vec<Range>,
) -> SSTFileStream<Client> {
let ctx = RangeContext::new(client, cfg.region_split_size.0 as usize);
let engine_iter = engine.new_iter(true);
let iter = RangeIterator::new(engine_iter, stream_range.clone(), finished_ranges);
SSTFileStream {
ctx,
iter,
engine,
stream_range,
}
}
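    // Builds the next SST starting at the current iterator position; the
    // RangeContext decides where to stop (for example at a region boundary or
    // once the configured split size is reached), and the covered range is
    // returned together with the generated SST files.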
pub fn next(&mut self) -> Result<Option<SSTRange>> {
if !self.iter.valid() {
return Ok(None);
}
let mut w = self.engine.new_sst_writer()?;
let start = self.iter.key().to_owned();
self.ctx.reset(&start);
loop {
{
let k = self.iter.key();
let v = self.iter.value();
w.put(k, v)?;
self.ctx.add(k.len() + v.len());
}
if !self.iter.next() || self.ctx.should_stop_before(self.iter.key()) {
break;
}
}
let end = if self.iter.valid() {
self.iter.key()
} else {
self.stream_range.get_end()
};
let range = new_range(&start, end);
let infos = w.finish()?;
let mut ssts = Vec::new();
for info in infos {
ssts.push(self.new_sst_file(info));
}
Ok(Some((range, ssts)))
}
fn new_sst_file(&self, info: SSTInfo) -> SSTFile {
let mut digest = crc32::Digest::new(crc32::IEEE);
digest.write(&info.data);
let crc32 = digest.sum32();
let length = info.data.len() as u64;
let mut meta = SSTMeta::new();
meta.set_uuid(Uuid::new_v4().as_bytes().to_vec());
meta.set_range(info.range.clone());
meta.set_crc32(crc32);
meta.set_length(length);
meta.set_cf_name(info.cf_name.clone());
SSTFile {
meta,
data: info.data,
}
}
}
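// RangeIterator walks the underlying engine iterator over the portions of the
// stream range that are not covered by the already finished ranges.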
pub struct RangeIterator {
iter: DBIterator<Arc<DB>>,
ranges: Vec<Range>,
ranges_index: usize,
}
impl RangeIterator {
pub fn new(
iter: DBIterator<Arc<DB>>,
range: Range,
mut finished_ranges: Vec<Range>,
) -> RangeIterator {
finished_ranges.sort_by(|a, b| a.get_start().cmp(b.get_start()));
// Collect unfinished ranges.
let mut ranges = Vec::new();
let mut last_end = range.get_start();
let mut reach_end = false;
for range in &finished_ranges {
if last_end < range.get_start() {
ranges.push(new_range(last_end, range.get_start()));
}
if before_end(last_end, range.get_end()) {
last_end = range.get_end();
}
if last_end == RANGE_MAX {
reach_end = true;
break;
}
}
// Handle the last unfinished range.
if !reach_end && before_end(last_end, range.get_end()) {
ranges.push(new_range(last_end, range.get_end()));
}
let mut res = RangeIterator {
iter,
ranges,
ranges_index: 0,
};
// Seek to the first valid range.
res.seek_next();
res
}
pub fn next(&mut self) -> bool {
if !self.iter.next() {
return false;
}
{
let range = &self.ranges[self.ranges_index];
if before_end(self.iter.key(), range.get_end()) {
return true;
}
self.ranges_index += 1;
}
self.seek_next()
}
fn seek_next(&mut self) -> bool {
while let Some(range) = self.ranges.get(self.ranges_index) {
if !self.iter.seek(SeekKey::Key(range.get_start())) {
break;
}
assert!(self.iter.key() >= range.get_start());
if before_end(self.iter.key(), range.get_end()) {
break;
}
self.ranges_index += 1;
}
self.valid()
}
pub fn key(&self) -> &[u8] {
self.iter.key()
}
pub fn value(&self) -> &[u8] {
self.iter.value()
}
pub fn valid(&self) -> bool {
self.iter.valid() && self.ranges_index < self.ranges.len()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::import::test_helpers::*;
use std::path::Path;
use std::sync::Arc;
use rocksdb::{DBIterator, DBOptions, ReadOptions, Writable, DB};
use tempdir::TempDir;
use crate::config::DbConfig;
use crate::storage::types::Key;
fn open_db<P: AsRef<Path>>(path: P) -> Arc<DB> {
let path = path.as_ref().to_str().unwrap();
let mut opts = DBOptions::new();
opts.create_if_missing(true);
let db = DB::open(opts, path).unwrap();
Arc::new(db)
}
fn new_int_range(start: Option<i32>, end: Option<i32>) -> Range {
let mut range = Range::new();
if let Some(start) = start {
let k = format!("k-{:04}", start);
range.set_start(k.as_bytes().to_owned());
}
if let Some(end) = end {
let k = format!("k-{:04}", end);
range.set_end(k.as_bytes().to_owned());
}
range
}
fn new_range_iter(db: Arc<DB>, range: Range, skip_ranges: Vec<Range>) -> RangeIterator {
let ropts = ReadOptions::new();
let iter = DBIterator::new(Arc::clone(&db), ropts);
RangeIterator::new(iter, range, skip_ranges)
}
fn check_range_iter(iter: &mut RangeIterator, start: i32, end: i32) {
for i in start..end {
let k = format!("k-{:04}", i);
let v = format!("v-{:04}", i);
assert!(iter.valid());
assert_eq!(iter.key(), k.as_bytes());
assert_eq!(iter.value(), v.as_bytes());
iter.next();
}
}
fn
|
(
db: Arc<DB>,
range_opt: (Option<i32>, Option<i32>),
finished_ranges_opt: &[(Option<i32>, Option<i32>)],
unfinished_ranges: &[(i32, i32)],
) {
let range = new_int_range(range_opt.0, range_opt.1);
let mut finished_ranges = Vec::new();
for &(start, end) in finished_ranges_opt {
finished_ranges.push(new_int_range(start, end));
}
let mut iter = new_range_iter(db, range, finished_ranges);
for &(start, end) in unfinished_ranges {
check_range_iter(&mut iter, start, end);
}
assert!(!iter.valid());
}
#[test]
fn test_range_iterator() {
let dir = TempDir::new("_tikv_test_tmp_db").unwrap();
let db = open_db(dir.path());
for i in 0..100 {
let k = format!("k-{:04}", i);
let v = format!("v-{:04}", i);
db.put(k.as_bytes(), v.as_bytes()).unwrap();
}
// No finished ranges.
test_range_iterator_with(Arc::clone(&db), (None, None), &[], &[(0, 100)]);
test_range_iterator_with(Arc::clone(&db), (None, Some(25)), &[], &[(0, 25)]);
test_range_iterator_with(Arc::clone(&db), (Some(0), Some(25)), &[], &[(0, 25)]);
test_range_iterator_with(Arc::clone(&db), (Some(25), Some(75)), &[], &[(25, 75)]);
test_range_iterator_with(Arc::clone(&db), (Some(75), Some(100)), &[], &[(75, 100)]);
test_range_iterator_with(Arc::clone(&db), (Some(75), None), &[], &[(75, 100)]);
// Range [None, None) with some finished ranges.
test_range_iterator_with(Arc::clone(&db), (None, None), &[(None, None)], &[]);
test_range_iterator_with(
Arc::clone(&db),
(None, None),
&[(None, Some(25)), (Some(50), Some(75))],
&[(25, 50), (75, 100)],
);
test_range_iterator_with(
Arc::clone(&db),
(None, None),
&[(Some(25), Some(50)), (Some(75), None)],
&[(0, 25), (50, 75)],
);
test_range_iterator_with(
Arc::clone(&db),
(None, None),
&[
(Some(0), Some(25)),
(Some(50), Some(60)),
(Some(60), Some(70)),
],
&[(25, 50), (70, 100)],
);
test_range_iterator_with(
Arc::clone(&db),
(None, None),
&[
(Some(10), Some(30)),
(Some(50), Some(70)),
(Some(80), Some(90)),
(Some(20), Some(40)),
(Some(60), Some(80)),
(Some(70), Some(100)),
],
&[(0, 10), (40, 50)],
);
// Range [25, 75) with some finished ranges.
test_range_iterator_with(Arc::clone(&db), (Some(25), Some(75)), &[(None, None)], &[]);
test_range_iterator_with(
Arc::clone(&db),
(Some(25), Some(75)),
&[(None, Some(30)), (Some(50), Some(75))],
&[(30, 50)],
);
test_range_iterator_with(
Arc::clone(&db),
(Some(25), Some(75)),
&[(Some(30), Some(50)), (Some(60), None)],
&[(25, 30), (50, 60)],
);
test_range_iterator_with(
Arc::clone(&db),
(Some(25), Some(75)),
&[
(Some(25), Some(30)),
(Some(50), Some(60)),
(Some(60), Some(70)),
],
&[(30, 50), (70, 75)],
);
test_range_iterator_with(
Arc::clone(&db),
(Some(25), Some(75)),
&[
(Some(35), Some(45)),
(Some(55), Some(60)),
(Some(70), Some(75)),
(Some(30), Some(40)),
(Some(50), Some(65)),
(Some(60), Some(75)),
],
&[(25, 30), (45, 50)],
);
}
fn new_encoded_range(start: u8, end: u8) -> Range {
let k1 = Key::from_raw(&[start]).append_ts(0);
let k2 = Key::from_raw(&[end]).append_ts(0);
new_range(k1.as_encoded(), k2.as_encoded())
}
#[test]
fn test_sst_file_stream() {
let dir = TempDir::new("test_import_sst_file_stream").unwrap();
let uuid = Uuid::new_v4();
let opts = DbConfig::default();
let engine = Arc::new(Engine::new(dir.path(), uuid, opts).unwrap());
for i in 0..16 {
let k = Key::from_raw(&[i]).append_ts(0);
assert_eq!(k.as_encoded().len(), 17);
engine.put(k.as_encoded(), k.as_encoded()).unwrap();
}
let mut cfg = Config::default();
cfg.region_split_size.0 = 128; // An SST contains at most 4 entries.
let mut client = MockClient::new();
let keys = vec![
// [0, 3], [4, 6]
7, // [7, 9]
10,
// [10, 13], [14, 15]
];
let mut last = vec![];
for i in keys {
let k = Key::from_raw(&[i]).append_ts(0);
client.add_region_range(&last, k.as_encoded());
last = k.into_encoded();
}
// Add an unrelated range.
client.add_region_range(&last, b"abc");
client.add_region_range(b"abc", b"");
let client = Arc::new(client);
// Test all ranges.
{
let sst_range = new_range(RANGE_MIN, RANGE_MAX);
let finished_ranges = Vec::new();
let expected_ranges = vec![
(0, 3, Some(4)),
(4, 6, Some(7)),
(7, 9, Some(10)),
(10, 13, Some(14)),
(14, 15, None),
];
run_and_check_stream(
cfg.clone(),
Arc::clone(&client),
Arc::clone(&engine),
sst_range,
finished_ranges,
expected_ranges,
);
}
        // Test sst range [1, 15) with finished ranges [3, 5) and [7, 11).
{
let sst_range = new_encoded_range(1, 15);
let mut finished_ranges = Vec::new();
finished_ranges.push(new_encoded_range(3, 5));
finished_ranges.push(new_encoded_range(7, 11));
let expected_ranges = vec![(1, 6, Some(11)), (11, 14, Some(15))];
run_and_check_stream(
cfg.clone(),
Arc::clone(&client),
Arc::clone(&engine),
sst_range,
finished_ranges,
expected_ranges,
);
}
}
fn run_and_check_stream(
cfg: Config,
client: Arc<MockClient>,
engine: Arc<Engine>,
sst_range: Range,
finished_ranges: Vec<Range>,
expected_ranges: Vec<(u8, u8, Option<u8>)>,
) {
let mut stream = SSTFileStream::new(cfg, client, engine, sst_range, finished_ranges);
for (start, end, range_end) in expected_ranges {
let (range, ssts) = stream.next().unwrap().unwrap();
let start = Key::from_raw(&[start]).append_ts(0).into_encoded();
let end = Key::from_raw(&[end]).append_ts(0).into_encoded();
let range_end = match range_end {
Some(v) => Key::from_raw(&[v]).append_ts(0).into_encoded(),
None => RANGE_MAX.to_owned(),
};
assert_eq!(range.get_start(), start.as_slice());
assert_eq!(range.get_end(), range_end.as_slice());
for sst in ssts {
assert_eq!(sst.meta.get_range().get_start(), start.as_slice());
assert_eq!(sst.meta.get_range().get_end(), end.as_slice());
}
}
assert!(stream.next().unwrap().is_none());
}
}
|
test_range_iterator_with
|
modal.js
|
var i, len, modal, modals, options;
BULMA.toggleModal = function(el, options) {
if (!options.target) {
throw new Error('Found [BULMA-MODAL] but there is no target defined!');
}
el.addEventListener('click', function(e) {
|
backdrop = modal.querySelector('.modal-background');
closeBtn = modal.querySelector('.modal-close');
closeModal = function() {
if (BULMA.hasClass(modal, 'is-active')) {
BULMA.removeClass(modal, 'is-active');
return BULMA.unclick(this, closeModal);
}
};
if (options.closeByBackdrop === void 0 || options.closeByBackdrop) {
BULMA.click(backdrop, closeModal);
}
if (options.closeByButton === void 0 || options.closeByButton) {
BULMA.click(closeBtn, closeModal);
}
BULMA.addClass(modal, 'is-active');
});
};
if (!BULMA.isReady) {
modals = BULMA.getElements('modal');
if (modals && modals.length > 0) {
for (i = 0, len = modals.length; i < len; i++) {
modal = modals[i];
options = BULMA.parseOptions(modal);
BULMA.toggleModal(modal, options);
}
}
}
|
var backdrop, closeBtn, closeModal, modal;
e.preventDefault();
e.stopPropagation();
modal = document.getElementById(options.target);
|
main.go
|
package main
import (
"fmt"
"io/ioutil"
"net/http"
"os"
"os/signal"
"path"
"syscall"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
|
"github.com/prometheus/common/log"
kingpin "gopkg.in/alecthomas/kingpin.v2"
yaml "gopkg.in/yaml.v2"
)
type queryJob struct {
Id string
Timespan string
Repo string
MetricName string
MetricLabels []MetricLabel
}
type queryJobData struct {
Done bool
Events []map[string]interface{}
FieldOrder []string
MetaData map[string]interface{}
ExtraData map[string]string
ProcessedEvents int
}
type MetricMap struct {
Gauges map[string]*prometheus.GaugeVec
}
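// YamlConfig mirrors the queries file passed via --config. An illustrative example
// (the values below are placeholders, not taken from a real configuration):
//
//	queries:
//	  - query: "count()"
//	    repo: "my-repo"
//	    interval: "5m"
//	    metric_name: "humio_events_total"
//	    metric_labels:
//	      - key: "team"
//	        value: "platform"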
type YamlConfig struct {
Queries []struct {
Query string `yaml:"query"`
Repo string `yaml:"repo"`
Interval string `yaml:"interval"`
MetricName string `yaml:"metric_name"`
MetricLabels []MetricLabel `yaml:"metric_labels"`
} `yaml:"queries"`
}
type MetricLabel struct {
Key string `yaml:"key"`
Value string `yaml:"value"`
}
var (
version = ""
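	// supportedFunctions are the aggregate result fields looked up in the first event of a poll.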
supportedFunctions = []string{"_count", "_min", "_max", "_avg", "_rate", "_range", "_stddev", "_sum"}
)
const (
repoLabel = "repo"
intervalLabel = "interval"
)
func main() {
flags := kingpin.New("humio_exporter", "Humio exporter for Prometheus. Provide your Humio API token and configuration file with queries to expose as Prometheus metrics.")
configFile := flags.Flag("config", "The humio_exporter configuration file to be used").Required().String()
baseURL := flags.Flag("humio.url", "Humio base API url").Required().String()
apiToken := flags.Flag("humio.api-token", "Humio API token").Required().String()
requestTimeout := flags.Flag("humio.timeout", "Timeout for requests against the Humio API").Default("10").Int()
listenAddress := flags.Flag("web.listen-address", "Address on which to expose metrics.").Default(":9534").String()
log.AddFlags(flags)
flags.HelpFlag.Short('h')
flags.Version(version)
kingpin.MustParse(flags.Parse(os.Args[1:]))
// Parse YAML queries file
yamlConfig := YamlConfig{}
currentDir, err := os.Getwd()
if err != nil {
log.Fatal(err)
}
yamlFile, err := ioutil.ReadFile(path.Join(currentDir, *configFile))
if err != nil {
log.Infof("yamlFile.Get err #%v ", err)
}
err = yaml.Unmarshal([]byte(yamlFile), &yamlConfig)
if err != nil {
log.Fatalf("error: %v", err)
}
// Register the prometheus metrics
metricMap := MetricMap{
Gauges: make(map[string]*prometheus.GaugeVec),
}
for _, q := range yamlConfig.Queries {
metricMap.AddGauge(q.MetricName, q.MetricLabels)
}
err = metricMap.Register()
if err != nil {
log.Fatalf("error: %v", err)
}
http.Handle("/metrics", promhttp.Handler())
http.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, "healthy")
})
	// TODO: Add more logic for when the exporter is actually ready,
	// e.g. the connection to Humio is successful.
http.HandleFunc("/ready", func(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, "healthy")
})
done := make(chan error, 1)
go func() {
log.Infof("Listening on %s", *listenAddress)
err := http.ListenAndServe(*listenAddress, nil)
if err != nil {
done <- err
}
}()
go runAPIPolling(done, *baseURL, *apiToken, yamlConfig, secondDuration(*requestTimeout), metricMap)
reason := <-done
if reason != nil {
log.Errorf("Humio_exporter exited due to error: %v", reason)
os.Exit(1)
}
log.Infof("Humio_exporter exited with exit 0")
}
func runAPIPolling(done chan error, url, token string, yamlConfig YamlConfig, requestTimeout time.Duration, metricMap MetricMap) {
client := client{
httpClient: &http.Client{
Timeout: requestTimeout,
},
token: token,
baseURL: url,
}
var jobs []queryJob
for _, q := range yamlConfig.Queries {
job, err := client.startQueryJob(q.Query, q.Repo, q.MetricName, q.Interval, "now", q.MetricLabels)
if err != nil {
done <- err
return
}
jobs = append(jobs, job)
}
sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
go func() {
select {
case sig := <-sigs:
for _, job := range jobs {
client.stopQueryJob(job.Id, job.Repo)
}
done <- fmt.Errorf("received os signal '%s'", sig)
}
}()
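	// Poll every query job in a loop and update its gauge once the query reports it is done.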
for {
for _, job := range jobs {
poll, err := client.pollQueryJob(job.Id, job.Repo)
if err != nil {
done <- err
return
			}
			if len(poll.Events) == 0 {
				// No events returned yet for this query; skip it until data is available.
				continue
			}
			var floatValue float64
			for _, f := range supportedFunctions {
				value, ok := poll.Events[0][f]
if !ok {
continue
}
floatValue, err = parseFloat(value)
if err != nil {
done <- err
return
}
break
}
if poll.Done {
				err = metricMap.UpdateMetricValue(job.MetricName, job.Timespan, job.Repo, floatValue, job.MetricLabels)
if err != nil {
done <- err
return
}
} else {
log.Debugf("Skipped value because query isn't done. Timespan: %v, Value: %v", job.Timespan, floatValue)
}
}
time.Sleep(5000 * time.Millisecond)
}
}
func secondDuration(seconds int) time.Duration {
return time.Duration(seconds) * time.Second
}
func (m *MetricMap) Register() error {
for _, v := range m.Gauges {
err := prometheus.Register(v)
if err != nil {
return err
}
}
return nil
}
func (m *MetricMap) UpdateMetricValue(metricName, timespan, repo string, value float64, staticLabels []MetricLabel) error {
labels := make(map[string]string)
labels[intervalLabel] = timespan
labels[repoLabel] = repo
for _, l := range staticLabels {
labels[l.Key] = l.Value
}
gauge := m.Gauges[metricName]
gauge.With(labels).Set(value)
return nil
}
func (m *MetricMap) AddGauge(metricName string, staticLabels []MetricLabel) error {
var labelKeys []string
labelKeys = append(labelKeys, intervalLabel)
labelKeys = append(labelKeys, repoLabel)
for _, l := range staticLabels {
labelKeys = append(labelKeys, l.Key)
}
m.Gauges[metricName] = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Name: metricName,
Help: "Gauge for humio query",
}, labelKeys)
return nil
}
| |
5.js
|
// A graph that does not allow self-loops or parallel edges
class Graph {
constructor(V, e
|
= []) {
this.V = V;
this.E = 0;
this._adj = [];
for (let i = 0; i < V; i++) {
this._adj[i] = [];
}
for (const [startV, endV] of edges) {
this.addEdge(startV, endV);
}
}
adj(v) {
return this._adj[v];
}
addEdge(startV, endV) {
    if (this._adj[startV].includes(endV)) {
      throw new Error(`Parallel edge exists: ${startV} -> ${endV}`);
    }
    if (this._adj[endV].includes(startV)) {
      throw new Error(`Parallel edge exists: ${endV} -> ${startV}`);
    }
    if (startV === endV) {
      throw new Error(`Self-loop exists: ${startV} -> ${endV}`);
    }
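    // The graph is undirected, so record the edge in both adjacency lists.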
this._adj[startV].unshift(endV);
this._adj[endV].unshift(startV);
this.E++;
}
hasEdge(v, w) {
return this.adj(v).includes(w);
}
}
|
dges
|
Loader.ts
|
import { AttriebutesRendererButton } from "./Attributes/Renderer/AttriebutesRendererButton";
import { AttriebutesRendererContainer } from "./Attributes/Renderer/AttriebutesRendererContainer";
import { AttriebutesRendererEmpty } from "./Attributes/Renderer/AttriebutesRendererEmpty";
import { AttriebutesRendererImage } from "./Attributes/Renderer/AttriebutesRendererImage";
import { AttriebutesRendererLabel } from "./Attributes/Renderer/AttriebutesRendererLabel";
import { AttriebutesRendererMask } from "./Attributes/Renderer/AttriebutesRendererMask";
import { AttriebutesRendererPanel } from "./Attributes/Renderer/AttriebutesRendererPanel";
import { AttriebutesRendererScrollView } from "./Attributes/Renderer/AttriebutesRendererScrollView";
import { AttriebutesRendererText } from "./Attributes/Renderer/AttriebutesRendererText";
export function
|
() {
AttriebutesRendererButton.load();
AttriebutesRendererContainer.load();
AttriebutesRendererEmpty.load();
AttriebutesRendererImage.load();
AttriebutesRendererLabel.load();
AttriebutesRendererMask.load();
AttriebutesRendererPanel.load();
AttriebutesRendererScrollView.load();
AttriebutesRendererText.load();
}
|
Load
|
f8e7e30b.a76e522b.js
|
"use strict";(self.webpackChunkdocusaurus_template=self.webpackChunkdocusaurus_template||[]).push([[6718],{3905:function(e,t,n){n.d(t,{Zo:function(){return p},kt:function(){return u}});var a=n(67294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function o(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?r(Object(n),!0).forEach((function(t){i(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function l(e,t){if(null==e)return{};var n,a,i=function(e,t){if(null==e)return{};var n,a,i={},r=Object.keys(e);for(a=0;a<r.length;a++)n=r[a],t.indexOf(n)>=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a<r.length;a++)n=r[a],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var s=a.createContext({}),m=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},p=function(e){var t=m(e.components);return a.createElement(s.Provider,{value:t},e.children)},c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},d=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),d=m(n),u=i,h=d["".concat(s,".").concat(u)]||d[u]||c[u]||r;return n?a.createElement(h,o(o({ref:t},p),{},{components:n})):a.createElement(h,o({ref:t},p))}));function u(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,o=new Array(r);o[0]=d;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l.mdxType="string"==typeof e?e:i,o[1]=l;for(var m=2;m<r;m++)o[m]=n[m];return a.createElement.apply(null,o)}return a.createElement.apply(null,n)}d.displayName="MDXCreateElement"},25914:function(e,t,n){n.r(t),n.d(t,{frontMatter:function(){return l},contentTitle:function(){return s},metadata:function(){return m},toc:function(){return p},default:function(){return d}});var a=n(87462),i=n(63366),r=(n(67294),n(3905)),o=["components"],l={id:"email-sms",title:"Out-of-band communication via E-Mail and SMS"},s=void 0,m={unversionedId:"concepts/email-sms",id:"version-v0.8/concepts/email-sms",isDocsHomePage:!1,title:"Out-of-band communication via E-Mail and SMS",description:"Ory Kratos sends out-of-band messages via SMS or E-Mail. 
The following exemplary use cases require these messages:",source:"@site/versioned_docs/version-v0.8/concepts/email-sms.md",sourceDirName:"concepts",slug:"/concepts/email-sms",permalink:"/kratos/docs/concepts/email-sms",editUrl:"https://github.com/ory/kratos/edit/master/docs/versioned_docs/version-v0.8/concepts/email-sms.md",tags:[],version:"v0.8",lastUpdatedBy:"Vincent",lastUpdatedAt:1639741859,formattedLastUpdatedAt:"12/17/2021",frontMatter:{id:"email-sms",title:"Out-of-band communication via E-Mail and SMS"},sidebar:"version-v0.8/docs",previous:{title:"HTTP Redirection Configuration",permalink:"/kratos/docs/concepts/browser-redirect-flow-completion"},next:{title:"REST API Design",permalink:"/kratos/docs/concepts/rest-api"}},p=[{value:"Mail courier",id:"mail-courier",children:[{value:"Single instance",id:"single-instance",children:[],level:3},{value:"Multi-instance",id:"multi-instance",children:[],level:3}],level:2},{value:"Sending E-Mails via SMTP",id:"sending-e-mails-via-smtp",children:[{value:"Sender Address and Template Customization",id:"sender-address-and-template-customization",children:[],level:3},{value:"The Identity attribute",id:"the-identity-attribute",children:[],level:3},{value:"Nested templates",id:"nested-templates",children:[{value:"Example: i18n customization",id:"example-i18n-customization",children:[],level:4}],level:3},{value:"Custom Headers",id:"custom-headers",children:[],level:3}],level:2},{value:"Sending SMS",id:"sending-sms",children:[],level:2}],c={toc:p};function
|
(e){var t=e.components,n=(0,i.Z)(e,o);return(0,r.kt)("wrapper",(0,a.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Ory Kratos sends out-of-band messages via SMS or E-Mail. The following exemplary use cases require these messages:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Send an account activation email"),(0,r.kt)("li",{parentName:"ul"},"Verify an E-Mail address or mobile phone number using SMS"),(0,r.kt)("li",{parentName:"ul"},"Preventing Account Enumeration Attacks"),(0,r.kt)("li",{parentName:"ul"},"Sending a 2FA Codes"),(0,r.kt)("li",{parentName:"ul"},"...")),(0,r.kt)("h2",{id:"mail-courier"},"Mail courier"),(0,r.kt)("p",null,"Ory Kratos processes email dispatch using a mail courier worker, which must run\nas a singleton in order to process the mail queue correctly. It can be run as a\nbackground worker on a single-instance Kratos setup or as a distinct singleton\nforeground worker in multi-instance deployments."),(0,r.kt)("h3",{id:"single-instance"},"Single instance"),(0,r.kt)("p",null,"To run the mail courier in the background on your single Kratos instance, add\nthe ",(0,r.kt)("inlineCode",{parentName:"p"},"--watch-courier")," flag to your ",(0,r.kt)("inlineCode",{parentName:"p"},"kratos serve")," command, as outlined in the\n",(0,r.kt)("a",{parentName:"p",href:"/kratos/docs/cli/kratos-serve"},"CLI docs")),(0,r.kt)("h3",{id:"multi-instance"},"Multi-instance"),(0,r.kt)("p",null,"If you're running multiple instances of Kratos (eg replicated Kubernetes\ndeployment), you need to run the mail courier as a separate singleton job. The\ncourier can be started with the ",(0,r.kt)("inlineCode",{parentName:"p"},"kratos courier watch")," command\n(",(0,r.kt)("a",{parentName:"p",href:"/kratos/docs/cli/kratos-courier"},"CLI docs"),")."),(0,r.kt)("h2",{id:"sending-e-mails-via-smtp"},"Sending E-Mails via SMTP"),(0,r.kt)("p",null,"To have E-Mail delivery running with Ory Kratos requires an SMTP server. 
This is\nset up in the configuration file using an absolute URL with the ",(0,r.kt)("inlineCode",{parentName:"p"},"smtp")," or\n",(0,r.kt)("inlineCode",{parentName:"p"},"smtps")," scheme:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-yaml",metastring:'title="path/to/my/kratos/config.yml"',title:'"path/to/my/kratos/config.yml"'},'# $ kratos -c path/to/my/kratos/config.yml serve\ncourier:\n smtp:\n connection_uri: smtps://foo:bar@my-smtp-server:1234/\n # Examples:\n # - "smtp://foo:bar@my-mailserver:1234/?disable_starttls=true\n # (NOT RECOMMENDED: Cleartext smtp for devel and legacy infrastructure\n # only)"\n # - smtp://foo:bar@my-mailserver:1234/ (Explicit StartTLS with certificate\n # trust verification)\n # - "smtp://foo:bar@my-mailserver:1234/?skip_ssl_verify=true (NOT\n # RECOMMENDED: Explicit StartTLS without certificate trust verification)"\n # - smtps://foo:bar@my-mailserver:1234/ (Implicit TLS with certificate trust\n # verification)\n # - "smtps://foo:bar@my-mailserver:1234/?skip_ssl_verify=true (NOT\n # RECOMMENDED: Implicit TLS without certificate trust verification)"\n')),(0,r.kt)("h3",{id:"sender-address-and-template-customization"},"Sender Address and Template Customization"),(0,r.kt)("p",null,"You can customize the sender address and email templates."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-yaml",metastring:'title="path/to/my/kratos/config.yml"',title:'"path/to/my/kratos/config.yml"'},"# $ kratos -c path/to/my/kratos/config.yml serve\ncourier:\n ## SMTP Sender Address ##\n #\n # The recipient of an email will see this as the sender address.\n #\n # Default value: [email protected]\n #\n # Set this value using environment variables on\n # - Linux/macOS:\n # $ export COURIER_SMTP_FROM_ADDRESS=<value>\n # - Windows Command Line (CMD):\n # > set COURIER_SMTP_FROM_ADDRESS=<value>\n #\n smtp:\n from_address: [email protected]\n ## Override message templates ##\n #\n # You can override certain or all message templates by pointing this key to the path where the templates are located.\n #\n # Examples:\n # - /conf/courier-templates\n #\n # Set this value using environment variables on\n # - Linux/macOS:\n # $ export COURIER_TEMPLATE_OVERRIDE_PATH=<value>\n # - Windows Command Line (CMD):\n # > set COURIER_TEMPLATE_OVERRIDE_PATH=<value>\n #\n template_override_path: /conf/courier-templates\n")),(0,r.kt)("p",null,"Ory Kratos comes with built-in templates. If you wish to define your own, custom\ntemplates, you should define ",(0,r.kt)("inlineCode",{parentName:"p"},"template_override_path"),", as shown above, to\nindicate where your custom templates are located. 
This will become the\n",(0,r.kt)("inlineCode",{parentName:"p"},"<template-root>")," for your custom templates, as indicated below."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"email.subject.gotmpl"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"email.body.gotmpl")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"email.body.plaintext.gotmpl"),"\nare common template file names expected in the sub directories of the root\ndirectory, corresponding to the respective methods for filling e-mail subject\nand body."),(0,r.kt)("blockquote",null,(0,r.kt)("p",{parentName:"blockquote"},"Templates use the golang template engine in the ",(0,r.kt)("inlineCode",{parentName:"p"},"text/template")," package for\nrendering the ",(0,r.kt)("inlineCode",{parentName:"p"},"email.subject.gotmpl")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"email.body.plaintext.gotmpl"),"\ntemplates, and the ",(0,r.kt)("inlineCode",{parentName:"p"},"html/template")," package for rendering the\n",(0,r.kt)("inlineCode",{parentName:"p"},"email.body.gotmpl")," template: ",(0,r.kt)("a",{parentName:"p",href:"https://pkg.go.dev/text/template"},"https://pkg.go.dev/text/template")," >\n",(0,r.kt)("a",{parentName:"p",href:"https://pkg.go.dev/html/template"},"https://pkg.go.dev/html/template")),(0,r.kt)("p",{parentName:"blockquote"},"Templates can use the ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/Masterminds/sprig"},"Sprig")," library,\nwhich provides more than 100 commonly used template functions:\n",(0,r.kt)("a",{parentName:"p",href:"http://masterminds.github.io/sprig/"},"http://masterminds.github.io/sprig/"))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"recovery"),": recovery email templates directory, expected to be located in\n",(0,r.kt)("inlineCode",{parentName:"li"},"<root_directory>/recovery"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"valid: sub directory, expected to be located in\n",(0,r.kt)("inlineCode",{parentName:"li"},"<template-root>/recovery/valid"),", containing templates with variables ",(0,r.kt)("inlineCode",{parentName:"li"},"To"),",\n",(0,r.kt)("inlineCode",{parentName:"li"},"RecoveryURL")," and ",(0,r.kt)("inlineCode",{parentName:"li"},"Identity")," for validating a recovery"),(0,r.kt)("li",{parentName:"ul"},"invalid: sub directory, expected to be located in\n",(0,r.kt)("inlineCode",{parentName:"li"},"<template-root>/recovery/invalid"),", containing templates with variables ",(0,r.kt)("inlineCode",{parentName:"li"},"To"),"\nfor invalidating a recovery"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"verification"),": verification email templates directory, expected to be\nlocated in ",(0,r.kt)("inlineCode",{parentName:"li"},"<root_directory>/verification"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"valid: sub directory, expected to be located in\n",(0,r.kt)("inlineCode",{parentName:"li"},"<template-root>/verification/valid"),", containing templates with variables\n",(0,r.kt)("inlineCode",{parentName:"li"},"To"),", ",(0,r.kt)("inlineCode",{parentName:"li"},"VerificationURL")," and ",(0,r.kt)("inlineCode",{parentName:"li"},"Identity")," for validating a verification"),(0,r.kt)("li",{parentName:"ul"},"invalid: sub directory, expected to be located in\n",(0,r.kt)("inlineCode",{parentName:"li"},"<template-root>/verification/invalid"),", containing templates with variables\n",(0,r.kt)("inlineCode",{parentName:"li"},"To")," for invalidating a verification")))),(0,r.kt)("p",null,"For 
example:\n",(0,r.kt)("a",{parentName:"p",href:"https://github.com/ory/kratos/blob/master/courier/template/courier/builtin/templates/verification/valid/email.body.gotmpl"},(0,r.kt)("inlineCode",{parentName:"a"},"https://github.com/ory/kratos/blob/master/courier/template/courier/builtin/templates/verification/valid/email.body.gotmpl"))),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-gotmpl",metastring:'title="courier/template/templates/verification/valid/email.body.gotmpl"',title:'"courier/template/templates/verification/valid/email.body.gotmpl"'},'Hi, please verify your account by clicking the following link:\n\n<a href="{{ .VerificationURL }}">{{ .VerificationURL }}</a>\n')),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-gotmp",metastring:'title="courier/template/templates/verification/valid/email.body.plaintext.gotmpl"',title:'"courier/template/templates/verification/valid/email.body.plaintext.gotmpl"'},"Hi, please verify your account by clicking the following link: {{ .VerificationURL }}\n")),(0,r.kt)("h3",{id:"the-identity-attribute"},"The Identity attribute"),(0,r.kt)("p",null,"To be able to customize the content of templates based on the identity of the\nrecipient of the e-mail, the identity has been made available as ",(0,r.kt)("inlineCode",{parentName:"p"},"Identity"),".\nThis object is a map containing all the attributes of an identity, such as ",(0,r.kt)("inlineCode",{parentName:"p"},"id"),",\n",(0,r.kt)("inlineCode",{parentName:"p"},"state"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"recovery_addresses"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"verifiable_addresses")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"traits"),"."),(0,r.kt)("h3",{id:"nested-templates"},"Nested templates"),(0,r.kt)("p",null,"You can use nested templates to render ",(0,r.kt)("inlineCode",{parentName:"p"},"email.subject.gotmpl"),",\n",(0,r.kt)("inlineCode",{parentName:"p"},"email.body.gotmpl")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"email.body.plaintext.gotmpl")," templates."),(0,r.kt)("h4",{id:"example-i18n-customization"},"Example: i18n customization"),(0,r.kt)("p",null,"Using nested templates, you can either use in-line template definitions, or as\nin this example, use separate templates. In this example, we will define the\nemail body for recovery e-mails. Assuming that we have an attribute named ",(0,r.kt)("inlineCode",{parentName:"p"},"lang"),"\nthat contains the required language in the ",(0,r.kt)("inlineCode",{parentName:"p"},"traits")," of the identity, we can\ndefine our templates as indicated below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-txt",metastring:'file="<template-root>/recovery/valid/email.body.gotmpl"',file:'"<template-root>/recovery/valid/email.body.gotmpl"'},'\n{{- if eq .Identity.traits.language "de" -}}\n{{ template "email.body.de.gotmpl" . }}\n{{- else -}}\n{{ template "email.body.en.gotmpl" . 
}}\n{{- end -}}\n<a href="{{ .RecoveryURL }}">{{.RecoveryURL }}</a>\n')),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-txt",metastring:'file="<template-root>/recovery/valid/email.body.de.gotmpl"',file:'"<template-root>/recovery/valid/email.body.de.gotmpl"'},"\nHallo {{ upper .Identity.traits.firstName }},\n\nUm Ihr Konto wiederherzustellen, klicken Sie bitte auf den folgenden Link:\n")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-txt",metastring:'file="<template-root>/recovery/valid/email.body.en.gotmpl"',file:'"<template-root>/recovery/valid/email.body.en.gotmpl"'},"\n\nHello {{ upper .Identity.traits.firstName }},\n\nto recover your account, please click on the link below:\n")),(0,r.kt)("p",null,"As indicated by the example, we need a root template, which is the\n",(0,r.kt)("inlineCode",{parentName:"p"},"email.body.gotmpl")," template, and then we define sub templates that conform to\nthe following pattern: ",(0,r.kt)("inlineCode",{parentName:"p"},"email.body*"),". You can also see that the ",(0,r.kt)("inlineCode",{parentName:"p"},"Identity")," of\nthe user is available in all templates, and that you can use Sprig functions\nalso in the nested templates."),(0,r.kt)("h3",{id:"custom-headers"},"Custom Headers"),(0,r.kt)("p",null,"You can configure custom SMTP headers. For example, if integrating with AWS SES\nSMTP interface, the headers can be configured for cross-account sending:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-yaml",metastring:'title="path/to/my/kratos/config.yml"',title:'"path/to/my/kratos/config.yml"'},"# $ kratos -c path/to/my/kratos/config.yml serve\ncourier:\n smtp:\n headers:\n X-SES-SOURCE-ARN: arn:aws:ses:us-west-2:123456789012:identity/example.com\n X-SES-FROM-ARN: arn:aws:ses:us-west-2:123456789012:identity/example.com\n X-SES-RETURN-PATH-ARN: arn:aws:ses:us-west-2:123456789012:identity/example.com\n")),(0,r.kt)("h2",{id:"sending-sms"},"Sending SMS"),(0,r.kt)("p",null,"The Sending SMS feature is not supported at present. It will be available in a\nfuture version of Ory Kratos."))}d.isMDXComponent=!0}}]);
|
d
|
apps.py
|
from django.apps import AppConfig
|
class ShowsConfig(AppConfig):
name = 'shows'
|
|
index.py
|
import base64
import json
import time
|
    print ('start handler')
    count = len(event['Records'])
    print ('Get record count:')
    print (count)
    for record in event['Records']:
        payload = base64.b64decode(record['kinesis']['data']).decode("utf-8")
        print("Payload: " + payload)
        time.sleep(1)
    return 'Successfully {} records.'.format(count)
|
def lambda_handler(event, context):
|
material-dashboard.min40a0.js
|
isWindows = -1 < navigator.platform.indexOf("Win"), isWindows ? ($(".sidebar .sidebar-wrapper, .main-panel").perfectScrollbar(), $("html").addClass("perfect-scrollbar-on")) : $("html").addClass("perfect-scrollbar-off");
var breakCards = !0,
searchVisible = 0,
transparent = !0,
transparentDemo = !0,
fixedTop = !1,
mobile_menu_visible = 0,
mobile_menu_initialized = !1,
toggle_initialized = !1,
bootstrap_nav_initialized = !1,
seq = 0,
delays = 80,
durations = 500,
seq2 = 0,
delays2 = 80,
durations2 = 500;
function debounce(t, n, i) {
var r;
return function() {
var e = this,
a = arguments;
clearTimeout(r), r = setTimeout(function() { r = null, i || t.apply(e, a) }, n), i && !r && t.apply(e, a)
}
}
$(document).ready(function() {
$sidebar = $(".sidebar"), window_width = $(window).width(), $("body").bootstrapMaterialDesign(), md.initSidebarsCheck(), window_width = $(window).width(), md.checkSidebarImage(), md.initMinimizeSidebar(), $(".dropdown-menu a.dropdown-toggle").on("click", function(e) {
var a = $(this),
t = $(this).offsetParent(".dropdown-menu");
return $(this).next().hasClass("show") || $(this).parents(".dropdown-menu").first().find(".show").removeClass("show"), $(this).next(".dropdown-menu").toggleClass("show"), $(this).closest("a").toggleClass("open"), $(this).parents("a.dropdown-item.dropdown.show").on("hidden.bs.dropdown", function(e) { $(".dropdown-menu .show").removeClass("show") }), t.parent().hasClass("navbar-nav") || a.next().css({ top: a[0].offsetTop, left: t.outerWidth() - 4 }), !1
}), 0 != $(".selectpicker").length && $(".selectpicker").selectpicker(), $('[rel="tooltip"]').tooltip(), $('[data-toggle="popover"]').popover();
var e = $(".tagsinput").data("color");
0 != $(".tagsinput").length && $(".tagsinput").tagsinput(), $(".bootstrap-tagsinput").addClass(e + "-badge"), $(".select").dropdown({ dropdownClass: "dropdown-menu", optionClass: "" }), $(".form-control").on("focus", function() { $(this).parent(".input-group").addClass("input-group-focus") }).on("blur", function() { $(this).parent(".input-group").removeClass("input-group-focus") }), 1 == breakCards && $('[data-header-animation="true"]').each(function() {
$(this);
var a = $(this).parent(".card");
a.find(".fix-broken-card").click(function() {
console.log(this);
var e = $(this).parent().parent().siblings(".card-header, .card-header-image");
e.removeClass("hinge").addClass("fadeInDown"), a.attr("data-count", 0), setTimeout(function() { e.removeClass("fadeInDown animate") }, 480)
}), a.mouseenter(function() {
var e = $(this);
hover_count = parseInt(e.attr("data-count"), 10) + 1 || 0, e.attr("data-count", hover_count), 20 <= hover_count && $(this).children(".card-header, .card-header-image").addClass("hinge animated")
})
}), $('input[type="checkbox"][required="true"], input[type="radio"][required="true"]').on("click", function() { $(this).hasClass("error") && $(this).closest("div").removeClass("has-error") })
}), $(document).on("click", ".navbar-toggler", function() {
if ($toggle = $(this), 1 == mobile_menu_visible) $("html").removeClass("nav-open"), $(".close-layer").remove(), setTimeout(function() { $toggle.removeClass("toggled") }, 400), mobile_menu_visible = 0;
else {
setTimeout(function() { $toggle.addClass("toggled") }, 430);
var e = $('<div class="close-layer"></div>');
0 != $("body").find(".main-panel").length ? e.appendTo(".main-panel") : $("body").hasClass("off-canvas-sidebar") && e.appendTo(".wrapper-full-page"), setTimeout(function() { e.addClass("visible") }, 100), e.click(function() { $("html").removeClass("nav-open"), mobile_menu_visible = 0, e.removeClass("visible"), setTimeout(function() { e.remove(), $toggle.removeClass("toggled") }, 400) }), $("html").addClass("nav-open"), mobile_menu_visible = 1
}
}), $(window).resize(function() { md.initSidebarsCheck(), seq = seq2 = 0, setTimeout(function() { md.initDashboardPageCharts() }, 500) }), md = {
misc: { navbar_menu_visible: 0, active_collapse: !0, disabled_collapse_init: 0 },
checkSidebarImage: function() { $sidebar = $(".sidebar"), image_src = $sidebar.data("image"), void 0 !== image_src && (sidebar_container = '<div class="sidebar-background" style="background-image: url(' + image_src + ') "/>', $sidebar.append(sidebar_container)) },
showNotification: function(e, a) { type = ["", "info", "danger", "success", "warning", "rose", "primary"], color = Math.floor(6 * Math.random() + 1), $.notify({ icon: "add_alert", message: "Welcome to <b>Material Dashboard Pro</b> - a beautiful admin panel for every web developer." }, { type: type[color], timer: 3e3, placement: { from: e, align: a } }) },
initDocumentationCharts: function() {
if (0 != $("#dailySalesChart").length && 0 != $("#websiteViewsChart").length) {
dataDailySalesChart = {
labels: ["M", "T", "W", "T", "F", "S", "S"],
series: [
[12, 17, 7, 17, 23, 18, 38]
|
},
initFormExtendedDatetimepickers: function() { $(".datetimepicker").datetimepicker({ icons: { time: "fa fa-clock-o", date: "fa fa-calendar", up: "fa fa-chevron-up", down: "fa fa-chevron-down", previous: "fa fa-chevron-left", next: "fa fa-chevron-right", today: "fa fa-screenshot", clear: "fa fa-trash", close: "fa fa-remove" } }), $(".datepicker").datetimepicker({ format: "MM/DD/YYYY", icons: { time: "fa fa-clock-o", date: "fa fa-calendar", up: "fa fa-chevron-up", down: "fa fa-chevron-down", previous: "fa fa-chevron-left", next: "fa fa-chevron-right", today: "fa fa-screenshot", clear: "fa fa-trash", close: "fa fa-remove" } }), $(".timepicker").datetimepicker({ format: "h:mm A", icons: { time: "fa fa-clock-o", date: "fa fa-calendar", up: "fa fa-chevron-up", down: "fa fa-chevron-down", previous: "fa fa-chevron-left", next: "fa fa-chevron-right", today: "fa fa-screenshot", clear: "fa fa-trash", close: "fa fa-remove" } }) },
initSliders: function() {
var e = document.getElementById("sliderRegular");
noUiSlider.create(e, { start: 40, connect: [!0, !1], range: { min: 0, max: 100 } });
var a = document.getElementById("sliderDouble");
noUiSlider.create(a, { start: [20, 60], connect: !0, range: { min: 0, max: 100 } })
},
initSidebarsCheck: function() { $(window).width() <= 991 && 0 != $sidebar.length && md.initRightMenu() },
initDashboardPageCharts: function() {
if (0 != $("#dailySalesChart").length || 0 != $("#completedTasksChart").length || 0 != $("#websiteViewsChart").length) {
dataDailySalesChart = {
labels: ["M", "T", "W", "T", "F", "S", "S"],
series: [
[12, 17, 7, 17, 23, 18, 38]
]
}, optionsDailySalesChart = { lineSmooth: Chartist.Interpolation.cardinal({ tension: 0 }), low: 0, high: 50, chartPadding: { top: 0, right: 0, bottom: 0, left: 0 } };
var e = new Chartist.Line("#dailySalesChart", dataDailySalesChart, optionsDailySalesChart);
md.startAnimationForLineChart(e), dataCompletedTasksChart = {
labels: ["12p", "3p", "6p", "9p", "12p", "3a", "6a", "9a"],
series: [
[230, 750, 450, 300, 280, 240, 200, 190]
]
}, optionsCompletedTasksChart = { lineSmooth: Chartist.Interpolation.cardinal({ tension: 0 }), low: 0, high: 1e3, chartPadding: { top: 0, right: 0, bottom: 0, left: 0 } };
var a = new Chartist.Line("#completedTasksChart", dataCompletedTasksChart, optionsCompletedTasksChart);
md.startAnimationForLineChart(a);
var t = Chartist.Bar("#websiteViewsChart", {
labels: ["J", "F", "M", "A", "M", "J", "J", "A", "S", "O", "N", "D"],
series: [
[542, 443, 320, 780, 553, 453, 326, 434, 568, 610, 756, 895]
]
}, { axisX: { showGrid: !1 }, low: 0, high: 1e3, chartPadding: { top: 0, right: 5, bottom: 0, left: 0 } }, [
["screen and (max-width: 640px)", { seriesBarDistance: 5, axisX: { labelInterpolationFnc: function(e) { return e[0] } } }]
]);
md.startAnimationForBarChart(t)
}
},
initMinimizeSidebar: function() {
$("#minimizeSidebar").click(function() {
$(this);
console.log(md.misc.sidebar_mini_active);
1 == md.misc.sidebar_mini_active ? ($("body").removeClass("sidebar-mini"), md.misc.sidebar_mini_active = !1) : ($("body").addClass("sidebar-mini"), md.misc.sidebar_mini_active = !0);
var e = setInterval(function() { window.dispatchEvent(new Event("resize")) }, 180);
setTimeout(function() { clearInterval(e) }, 1e3)
})
},
checkScrollForTransparentNavbar: debounce(function() { 260 < $(document).scrollTop() ? transparent && (transparent = !1, $(".navbar-color-on-scroll").removeClass("navbar-transparent")) : transparent || (transparent = !0, $(".navbar-color-on-scroll").addClass("navbar-transparent")) }, 17),
initRightMenu: debounce(function() { $sidebar_wrapper = $(".sidebar-wrapper"), mobile_menu_initialized ? 991 < $(window).width() && ($sidebar_wrapper.find(".navbar-form").remove(), $sidebar_wrapper.find(".nav-mobile-menu").remove(), mobile_menu_initialized = !1) : ($navbar = $("nav").find(".navbar-collapse").children(".navbar-nav"), mobile_menu_content = "", nav_content = $navbar.html(), nav_content = '<ul class="nav navbar-nav nav-mobile-menu">' + nav_content + "</ul>", navbar_form = $("nav").find(".navbar-form").get(0).outerHTML, $sidebar_nav = $sidebar_wrapper.find(" > .nav"), $nav_content = $(nav_content), $navbar_form = $(navbar_form), $nav_content.insertBefore($sidebar_nav), $navbar_form.insertBefore($nav_content), $(".sidebar-wrapper .dropdown .dropdown-menu > li > a").click(function(e) { e.stopPropagation() }), window.dispatchEvent(new Event("resize")), mobile_menu_initialized = !0) }, 200),
startAnimationForLineChart: function(e) { e.on("draw", function(e) { "line" === e.type || "area" === e.type ? e.element.animate({ d: { begin: 600, dur: 700, from: e.path.clone().scale(1, 0).translate(0, e.chartRect.height()).stringify(), to: e.path.clone().stringify(), easing: Chartist.Svg.Easing.easeOutQuint } }) : "point" === e.type && (seq++, e.element.animate({ opacity: { begin: seq * delays, dur: durations, from: 0, to: 1, easing: "ease" } })) }), seq = 0 },
startAnimationForBarChart: function(e) { e.on("draw", function(e) { "bar" === e.type && (seq2++, e.element.animate({ opacity: { begin: seq2 * delays2, dur: durations2, from: 0, to: 1, easing: "ease" } })) }), seq2 = 0 },
initFullCalendar: function() {
$calendar = $("#fullCalendar"), today = new Date, y = today.getFullYear(), m = today.getMonth(), d = today.getDate(), $calendar.fullCalendar({
viewRender: function(e, a) { "month" != e.name && $(a).find(".fc-scroller").perfectScrollbar() },
header: { left: "title", center: "month,agendaWeek,agendaDay", right: "prev,next,today" },
defaultDate: today,
selectable: !0,
selectHelper: !0,
views: { month: { titleFormat: "MMMM YYYY" }, week: { titleFormat: " MMMM D YYYY" }, day: { titleFormat: "D MMM, YYYY" } },
select: function(t, n) {
swal({ title: "Create an Event", html: '<div class="form-group"><input class="form-control" placeholder="Event Title" id="input-field"></div>', showCancelButton: !0, confirmButtonClass: "btn btn-success", cancelButtonClass: "btn btn-danger", buttonsStyling: !1 }).then(function(e) {
var a;
event_title = $("#input-field").val(), event_title && (a = { title: event_title, start: t, end: n }, $calendar.fullCalendar("renderEvent", a, !0)), $calendar.fullCalendar("unselect")
}).catch(swal.noop)
},
editable: !0,
eventLimit: !0,
events: [{ title: "All Day Event", start: new Date(y, m, 1), className: "event-default" }, { id: 999, title: "Repeating Event", start: new Date(y, m, d - 4, 6, 0), allDay: !1, className: "event-rose" }, { id: 999, title: "Repeating Event", start: new Date(y, m, d + 3, 6, 0), allDay: !1, className: "event-rose" }, { title: "Meeting", start: new Date(y, m, d - 1, 10, 30), allDay: !1, className: "event-green" }, { title: "Lunch", start: new Date(y, m, d + 7, 12, 0), end: new Date(y, m, d + 7, 14, 0), allDay: !1, className: "event-red" }, { title: "Md-pro Launch", start: new Date(y, m, d - 2, 12, 0), allDay: !0, className: "event-azure" }, { title: "Birthday Party", start: new Date(y, m, d + 1, 19, 0), end: new Date(y, m, d + 1, 22, 30), allDay: !1, className: "event-azure" }, { title: "Click for Creative Tim", start: new Date(y, m, 21), end: new Date(y, m, 22), url: "http://www.creative-tim.com/", className: "event-orange" }, { title: "Click for Google", start: new Date(y, m, 21), end: new Date(y, m, 22), url: "http://www.creative-tim.com/", className: "event-orange" }]
})
},
initVectorMap: function() { $("#worldMap").vectorMap({ map: "world_mill_en", backgroundColor: "transparent", zoomOnScroll: !1, regionStyle: { initial: { fill: "#e4e4e4", "fill-opacity": .9, stroke: "none", "stroke-width": 0, "stroke-opacity": 0 } }, series: { regions: [{ values: { AU: 760, BR: 550, CA: 120, DE: 1300, FR: 540, GB: 690, GE: 200, IN: 200, RO: 600, RU: 300, US: 2920 }, scale: ["#AAAAAA", "#444444"], normalizeFunction: "polynomial" }] } }) }
};
//# sourceMappingURL=_site_dashboard_pro/assets/js/dashboard-pro.js.map
|
]
}, optionsDailySalesChart = { lineSmooth: Chartist.Interpolation.cardinal({ tension: 0 }), low: 0, high: 50, chartPadding: { top: 0, right: 0, bottom: 0, left: 0 } };
new Chartist.Line("#dailySalesChart", dataDailySalesChart, optionsDailySalesChart), new Chartist.Line("#websiteViewsChart", dataDailySalesChart, optionsDailySalesChart)
}
|
lsm303agr.rs
|
//! SyscallDriver for the LSM303AGR 3D accelerometer and 3D magnetometer sensor.
//!
//! May be used with NineDof and Temperature
//!
//! I2C Interface
//!
//! <https://www.st.com/en/mems-and-sensors/lsm303agr.html>
//!
//! The syscall interface is described in
//! [lsm303dlhc.md](https://github.com/tock/tock/tree/master/doc/syscalls/70006_lsm303dlhc.md)
//!
//! Usage
//! -----
//!
//! ```rust
//! let mux_i2c = components::i2c::I2CMuxComponent::new(&stm32f3xx::i2c::I2C1)
//! .finalize(components::i2c_mux_component_helper!());
//!
//! let lsm303dlhc = components::lsm303dlhc::Lsm303agrI2CComponent::new()
//! .finalize(components::lsm303dlhc_i2c_component_helper!(mux_i2c));
//!
//! lsm303dlhc.configure(
//! lsm303dlhc::Lsm303AccelDataRate::DataRate25Hz,
//! false,
//! lsm303dlhc::Lsm303Scale::Scale2G,
//! false,
//! true,
//! lsm303dlhc::Lsm303MagnetoDataRate::DataRate3_0Hz,
//! lsm303dlhc::Lsm303Range::Range4_7G,
//!);
//! ```
//!
//! NideDof Example
//!
//! ```rust
//! let grant_cap = create_capability!(capabilities::MemoryAllocationCapability);
//! let grant_ninedof = board_kernel.create_grant(&grant_cap);
//!
//! // use as primary NineDof Sensor
//! let ninedof = static_init!(
//! capsules::ninedof::NineDof<'static>,
//! capsules::ninedof::NineDof::new(lsm303dlhc, grant_ninedof)
//! );
//!
//! hil::sensors::NineDof::set_client(lsm303dlhc, ninedof);
//!
//! // use as secondary NineDof Sensor
//! let lsm303dlhc_secondary = static_init!(
//! capsules::ninedof::NineDofNode<'static, &'static dyn hil::sensors::NineDof>,
//! capsules::ninedof::NineDofNode::new(lsm303dlhc)
//! );
//! ninedof.add_secondary_driver(lsm303dlhc_secondary);
//! hil::sensors::NineDof::set_client(lsm303dlhc, ninedof);
//! ```
//!
//! Temperature Example
//!
//! ```rust
//! let grant_cap = create_capability!(capabilities::MemoryAllocationCapability);
//! let grant_temp = board_kernel.create_grant(&grant_cap);
//!
//! lsm303dlhc.configure(
//! lsm303dlhc::Lsm303AccelDataRate::DataRate25Hz,
//! false,
//! lsm303dlhc::Lsm303Scale::Scale2G,
//! false,
//! true,
//! lsm303dlhc::Lsm303MagnetoDataRate::DataRate3_0Hz,
//! lsm303dlhc::Lsm303Range::Range4_7G,
//!);
//! let temp = static_init!(
//! capsules::temperature::TemperatureSensor<'static>,
//! capsules::temperature::TemperatureSensor::new(lsm303dlhc, grant_temperature));
//! kernel::hil::sensors::TemperatureDriver::set_client(lsm303dlhc, temp);
//! ```
//!
//! Author: Alexandru Radovici <[email protected]>
//!
#![allow(non_camel_case_types)]
use core::cell::Cell;
use enum_primitive::cast::FromPrimitive;
use enum_primitive::enum_from_primitive;
use kernel::grant::Grant;
use kernel::hil::i2c;
use kernel::hil::sensors;
use kernel::syscall::{CommandReturn, SyscallDriver};
use kernel::utilities::cells::{OptionalCell, TakeCell};
use kernel::{ErrorCode, ProcessId};
use crate::driver;
use crate::lsm303xx::{
AccelerometerRegisters, Lsm303AccelDataRate, Lsm303MagnetoDataRate, Lsm303Range, Lsm303Scale,
CTRL_REG1, CTRL_REG4, RANGE_FACTOR_X_Y, RANGE_FACTOR_Z, SCALE_FACTOR,
};
/// Syscall driver number.
pub const DRIVER_NUM: usize = driver::NUM::Lsm303dlch as usize;
/// Register values
const REGISTER_AUTO_INCREMENT: u8 = 0x80;
enum_from_primitive! {
pub enum AgrAccelerometerRegisters {
TEMP_OUT_H_A = 0x0C,
TEMP_OUT_L_A = 0x0D
}
}
enum_from_primitive! {
enum MagnetometerRegisters {
CRA_REG_M = 0x60,
CRB_REG_M = 0x61,
OUT_X_H_M = 0x68,
OUT_X_L_M = 0x69,
OUT_Z_H_M = 0x6A,
OUT_Z_L_M = 0x6B,
OUT_Y_H_M = 0x6C,
OUT_Y_L_M = 0x6D,
}
}
#[derive(Clone, Copy, PartialEq)]
enum State {
Idle,
IsPresent,
SetPowerMode,
SetScaleAndResolution,
ReadAccelerationXYZ,
SetDataRate,
// SetTemperature,
SetRange,
ReadTemperature,
ReadMagnetometerXYZ,
}
#[derive(Default)]
pub struct App {}
pub struct Lsm303agrI2C<'a> {
config_in_progress: Cell<bool>,
i2c_accelerometer: &'a dyn i2c::I2CDevice,
i2c_magnetometer: &'a dyn i2c::I2CDevice,
state: Cell<State>,
accel_scale: Cell<Lsm303Scale>,
mag_range: Cell<Lsm303Range>,
accel_high_resolution: Cell<bool>,
mag_data_rate: Cell<Lsm303MagnetoDataRate>,
accel_data_rate: Cell<Lsm303AccelDataRate>,
low_power: Cell<bool>,
temperature: Cell<bool>,
buffer: TakeCell<'static, [u8]>,
nine_dof_client: OptionalCell<&'a dyn sensors::NineDofClient>,
temperature_client: OptionalCell<&'a dyn sensors::TemperatureClient>,
apps: Grant<App, 1>,
owning_process: OptionalCell<ProcessId>,
}
impl<'a> Lsm303agrI2C<'a> {
pub fn new(
i2c_accelerometer: &'a dyn i2c::I2CDevice,
i2c_magnetometer: &'a dyn i2c::I2CDevice,
buffer: &'static mut [u8],
grant: Grant<App, 1>,
) -> Lsm303agrI2C<'a> {
// setup and return struct
Lsm303agrI2C {
config_in_progress: Cell::new(false),
i2c_accelerometer: i2c_accelerometer,
i2c_magnetometer: i2c_magnetometer,
state: Cell::new(State::Idle),
accel_scale: Cell::new(Lsm303Scale::Scale2G),
mag_range: Cell::new(Lsm303Range::Range1G),
accel_high_resolution: Cell::new(false),
mag_data_rate: Cell::new(Lsm303MagnetoDataRate::DataRate0_75Hz),
accel_data_rate: Cell::new(Lsm303AccelDataRate::DataRate1Hz),
low_power: Cell::new(false),
temperature: Cell::new(false),
buffer: TakeCell::new(buffer),
nine_dof_client: OptionalCell::empty(),
temperature_client: OptionalCell::empty(),
apps: grant,
owning_process: OptionalCell::empty(),
}
}
pub fn configure(
&self,
accel_data_rate: Lsm303AccelDataRate,
low_power: bool,
accel_scale: Lsm303Scale,
accel_high_resolution: bool,
temperature: bool,
mag_data_rate: Lsm303MagnetoDataRate,
mag_range: Lsm303Range,
) -> Result<(), ErrorCode> {
if self.state.get() == State::Idle {
self.config_in_progress.set(true);
self.accel_scale.set(accel_scale);
self.accel_high_resolution.set(accel_high_resolution);
self.temperature.set(temperature);
self.mag_data_rate.set(mag_data_rate);
self.mag_range.set(mag_range);
self.accel_data_rate.set(accel_data_rate);
self.low_power.set(low_power);
self.set_power_mode(accel_data_rate, low_power)
} else {
Err(ErrorCode::BUSY)
}
}
fn is_present(&self) -> Result<(), ErrorCode> {
if self.state.get() != State::Idle {
self.state.set(State::IsPresent);
self.buffer.take().map_or(Err(ErrorCode::NOMEM), |buf| {
// turn on i2c to send commands
buf[0] = 0x0F;
self.i2c_magnetometer.enable();
if let Err((error, buf)) = self.i2c_magnetometer.write_read(buf, 1, 1) {
self.state.set(State::Idle);
self.buffer.replace(buf);
self.i2c_magnetometer.disable();
Err(error.into())
} else {
Ok(())
}
})
} else {
Err(ErrorCode::BUSY)
}
}
fn set_power_mode(
&self,
data_rate: Lsm303AccelDataRate,
low_power: bool,
) -> Result<(), ErrorCode> {
if self.state.get() == State::Idle {
self.state.set(State::SetPowerMode);
self.buffer.take().map_or(Err(ErrorCode::NOMEM), |buf| {
buf[0] = AccelerometerRegisters::CTRL_REG1 as u8;
buf[1] = (CTRL_REG1::ODR.val(data_rate as u8)
+ CTRL_REG1::LPEN.val(low_power as u8)
+ CTRL_REG1::ZEN::SET
+ CTRL_REG1::YEN::SET
+ CTRL_REG1::XEN::SET)
.value;
self.i2c_accelerometer.enable();
if let Err((error, buf)) = self.i2c_accelerometer.write(buf, 2) {
self.state.set(State::Idle);
self.i2c_accelerometer.disable();
self.buffer.replace(buf);
Err(error.into())
} else {
Ok(())
}
})
} else {
Err(ErrorCode::BUSY)
}
}
fn set_scale_and_resolution(
&self,
scale: Lsm303Scale,
high_resolution: bool,
) -> Result<(), ErrorCode> {
if self.state.get() == State::Idle {
self.state.set(State::SetScaleAndResolution);
            // TODO: move these into command_complete
self.accel_scale.set(scale);
self.accel_high_resolution.set(high_resolution);
self.buffer.take().map_or(Err(ErrorCode::NOMEM), |buf| {
buf[0] = AccelerometerRegisters::CTRL_REG4 as u8;
buf[1] = (CTRL_REG4::FS.val(scale as u8)
+ CTRL_REG4::HR.val(high_resolution as u8)
+ CTRL_REG4::BDU::SET)
.value;
self.i2c_accelerometer.enable();
if let Err((error, buf)) = self.i2c_accelerometer.write(buf, 2) {
self.state.set(State::Idle);
self.i2c_accelerometer.disable();
self.buffer.replace(buf);
Err(error.into())
} else {
Ok(())
}
})
} else {
Err(ErrorCode::BUSY)
}
}
fn read_acceleration_xyz(&self) -> Result<(), ErrorCode> {
if self.state.get() == State::Idle {
self.state.set(State::ReadAccelerationXYZ);
self.buffer.take().map_or(Err(ErrorCode::NOMEM), |buf| {
buf[0] = AccelerometerRegisters::OUT_X_L_A as u8 | REGISTER_AUTO_INCREMENT;
self.i2c_accelerometer.enable();
if let Err((error, buf)) = self.i2c_accelerometer.write_read(buf, 1, 6) {
self.state.set(State::Idle);
self.buffer.replace(buf);
self.i2c_accelerometer.disable();
Err(error.into())
} else {
Ok(())
}
})
} else {
Err(ErrorCode::BUSY)
}
}
fn set_magneto_data_rate(&self, data_rate: Lsm303MagnetoDataRate) -> Result<(), ErrorCode> {
if self.state.get() == State::Idle {
self.state.set(State::SetDataRate);
self.buffer.take().map_or(Err(ErrorCode::NOMEM), |buf| {
buf[0] = MagnetometerRegisters::CRA_REG_M as u8;
buf[1] = ((data_rate as u8) << 2) | 1 << 7;
self.i2c_magnetometer.enable();
if let Err((error, buf)) = self.i2c_magnetometer.write(buf, 2) {
self.state.set(State::Idle);
self.i2c_magnetometer.disable();
self.buffer.replace(buf);
Err(error.into())
} else {
Ok(())
}
})
} else {
Err(ErrorCode::BUSY)
}
}
fn set_range(&self, range: Lsm303Range) -> Result<(), ErrorCode> {
if self.state.get() == State::Idle {
self.state.set(State::SetRange);
self.mag_range.set(range);
self.buffer.take().map_or(Err(ErrorCode::NOMEM), |buf| {
buf[0] = MagnetometerRegisters::CRB_REG_M as u8;
buf[1] = (range as u8) << 5;
buf[2] = 0;
self.i2c_magnetometer.enable();
if let Err((error, buf)) = self.i2c_magnetometer.write(buf, 3) {
self.state.set(State::Idle);
self.i2c_magnetometer.disable();
self.buffer.replace(buf);
Err(error.into())
} else {
Ok(())
}
})
} else {
Err(ErrorCode::BUSY)
}
}
fn read_temperature(&self) -> Result<(), ErrorCode> {
if self.state.get() == State::Idle {
self.state.set(State::ReadTemperature);
self.buffer.take().map_or(Err(ErrorCode::NOMEM), |buf| {
buf[0] = AgrAccelerometerRegisters::TEMP_OUT_H_A as u8;
self.i2c_accelerometer.enable();
if let Err((error, buf)) = self.i2c_accelerometer.write_read(buf, 1, 2) {
self.state.set(State::Idle);
self.i2c_accelerometer.disable();
self.buffer.replace(buf);
Err(error.into())
} else {
Ok(())
}
})
} else {
Err(ErrorCode::BUSY)
}
}
fn read_magnetometer_xyz(&self) -> Result<(), ErrorCode> {
if self.state.get() == State::Idle {
self.state.set(State::ReadMagnetometerXYZ);
self.buffer.take().map_or(Err(ErrorCode::NOMEM), |buf| {
buf[0] = MagnetometerRegisters::OUT_X_H_M as u8;
self.i2c_magnetometer.enable();
if let Err((error, buf)) = self.i2c_magnetometer.write_read(buf, 1, 6) {
self.state.set(State::Idle);
self.i2c_magnetometer.disable();
self.buffer.replace(buf);
Err(error.into())
} else {
Ok(())
}
})
} else {
Err(ErrorCode::BUSY)
}
}
}
impl i2c::I2CClient for Lsm303agrI2C<'_> {
fn command_complete(&self, buffer: &'static mut [u8], status: Result<(), i2c::Error>) {
match self.state.get() {
State::IsPresent => {
let present = if status == Ok(()) && buffer[0] == 60
|
else {
false
};
self.owning_process.map(|pid| {
let _res = self.apps.enter(*pid, |_app, upcalls| {
upcalls
.schedule_upcall(0, (if present { 1 } else { 0 }, 0, 0))
.ok();
});
});
self.buffer.replace(buffer);
self.i2c_magnetometer.disable();
self.state.set(State::Idle);
}
State::SetPowerMode => {
let set_power = status == Ok(());
self.owning_process.map(|pid| {
let _res = self.apps.enter(*pid, |_app, upcalls| {
upcalls
.schedule_upcall(0, (if set_power { 1 } else { 0 }, 0, 0))
.ok();
});
});
self.buffer.replace(buffer);
self.i2c_accelerometer.disable();
self.state.set(State::Idle);
if self.config_in_progress.get() {
if let Err(_error) = self.set_scale_and_resolution(
self.accel_scale.get(),
self.accel_high_resolution.get(),
) {
self.config_in_progress.set(false);
}
}
}
State::SetScaleAndResolution => {
let set_scale_and_resolution = status == Ok(());
self.owning_process.map(|pid| {
let _res = self.apps.enter(*pid, |_app, upcalls| {
upcalls
.schedule_upcall(
0,
(if set_scale_and_resolution { 1 } else { 0 }, 0, 0),
)
.ok();
});
});
self.buffer.replace(buffer);
self.i2c_accelerometer.disable();
self.state.set(State::Idle);
if self.config_in_progress.get() {
if let Err(_error) = self.set_magneto_data_rate(self.mag_data_rate.get()) {
self.config_in_progress.set(false);
}
}
}
State::ReadAccelerationXYZ => {
let mut x: usize = 0;
let mut y: usize = 0;
let mut z: usize = 0;
let values = if status == Ok(()) {
self.nine_dof_client.map(|client| {
// compute using only integers
let scale_factor = self.accel_scale.get() as usize;
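                        // Each axis is converted as raw * SCALE_FACTOR[scale] * 1000 / 32768.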
x = (((buffer[0] as i16 | ((buffer[1] as i16) << 8)) as i32)
* (SCALE_FACTOR[scale_factor] as i32)
* 1000
/ 32768) as usize;
y = (((buffer[2] as i16 | ((buffer[3] as i16) << 8)) as i32)
* (SCALE_FACTOR[scale_factor] as i32)
* 1000
/ 32768) as usize;
z = (((buffer[4] as i16 | ((buffer[5] as i16) << 8)) as i32)
* (SCALE_FACTOR[scale_factor] as i32)
* 1000
/ 32768) as usize;
client.callback(x, y, z);
});
x = (buffer[0] as i16 | ((buffer[1] as i16) << 8)) as usize;
y = (buffer[2] as i16 | ((buffer[3] as i16) << 8)) as usize;
z = (buffer[4] as i16 | ((buffer[5] as i16) << 8)) as usize;
true
} else {
self.nine_dof_client.map(|client| {
client.callback(0, 0, 0);
});
false
};
self.owning_process.map(|pid| {
let _res = self.apps.enter(*pid, |_app, upcalls| {
if values {
upcalls.schedule_upcall(0, (x, y, z)).ok();
} else {
upcalls.schedule_upcall(0, (0, 0, 0)).ok();
}
});
});
self.buffer.replace(buffer);
self.i2c_accelerometer.disable();
self.state.set(State::Idle);
}
State::SetDataRate => {
let set_magneto_data_rate = status == Ok(());
self.owning_process.map(|pid| {
let _res = self.apps.enter(*pid, |_app, upcalls| {
upcalls
.schedule_upcall(0, (if set_magneto_data_rate { 1 } else { 0 }, 0, 0))
.ok();
});
});
self.buffer.replace(buffer);
self.i2c_magnetometer.disable();
self.state.set(State::Idle);
if self.config_in_progress.get() {
if let Err(_error) = self.set_range(self.mag_range.get()) {
self.config_in_progress.set(false);
}
}
}
State::SetRange => {
let set_range = status == Ok(());
self.owning_process.map(|pid| {
let _res = self.apps.enter(*pid, |_app, upcalls| {
upcalls
.schedule_upcall(0, (if set_range { 1 } else { 0 }, 0, 0))
.ok();
});
});
if self.config_in_progress.get() {
self.config_in_progress.set(false);
}
self.buffer.replace(buffer);
self.i2c_magnetometer.disable();
self.state.set(State::Idle);
}
State::ReadTemperature => {
let mut temp: usize = 0;
let values = if status == Ok(()) {
temp = (buffer[1] as u16 as i16 | ((buffer[0] as i16) << 8)) as usize;
self.temperature_client.map(|client| {
client.callback((temp as i16 / 8) as usize);
});
true
} else {
self.temperature_client.map(|client| {
client.callback(usize::MAX);
});
false
};
self.owning_process.map(|pid| {
let _res = self.apps.enter(*pid, |_app, upcalls| {
if values {
upcalls.schedule_upcall(0, (temp, 0, 0)).ok();
} else {
upcalls.schedule_upcall(0, (0, 0, 0)).ok();
}
});
});
self.buffer.replace(buffer);
self.i2c_accelerometer.disable();
self.state.set(State::Idle);
}
State::ReadMagnetometerXYZ => {
let mut x: usize = 0;
let mut y: usize = 0;
let mut z: usize = 0;
let values = if status == Ok(()) {
self.nine_dof_client.map(|client| {
// compute using only integers
let range = self.mag_range.get() as usize;
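                        // The magnetometer output registers are ordered X, Z, Y (high byte first).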
x = (((buffer[1] as i16 | ((buffer[0] as i16) << 8)) as i32) * 100
/ RANGE_FACTOR_X_Y[range] as i32) as usize;
z = (((buffer[3] as i16 | ((buffer[2] as i16) << 8)) as i32) * 100
/ RANGE_FACTOR_X_Y[range] as i32) as usize;
y = (((buffer[5] as i16 | ((buffer[4] as i16) << 8)) as i32) * 100
/ RANGE_FACTOR_Z[range] as i32) as usize;
client.callback(x, y, z);
});
x = ((buffer[1] as u16 | ((buffer[0] as u16) << 8)) as i16) as usize;
z = ((buffer[3] as u16 | ((buffer[2] as u16) << 8)) as i16) as usize;
y = ((buffer[5] as u16 | ((buffer[4] as u16) << 8)) as i16) as usize;
true
} else {
self.nine_dof_client.map(|client| {
client.callback(0, 0, 0);
});
false
};
self.owning_process.map(|pid| {
let _res = self.apps.enter(*pid, |_app, upcalls| {
if values {
upcalls.schedule_upcall(0, (x, y, z)).ok();
} else {
upcalls.schedule_upcall(0, (0, 0, 0)).ok();
}
});
});
self.buffer.replace(buffer);
self.i2c_magnetometer.disable();
self.state.set(State::Idle);
}
_ => {
self.i2c_magnetometer.disable();
self.i2c_accelerometer.disable();
self.buffer.replace(buffer);
}
}
}
}
impl SyscallDriver for Lsm303agrI2C<'_> {
fn command(
&self,
command_num: usize,
data1: usize,
data2: usize,
process_id: ProcessId,
) -> CommandReturn {
if command_num == 0 {
// Handle this first as it should be returned
// unconditionally
return CommandReturn::success();
}
let match_or_empty_or_nonexistant = self.owning_process.map_or(true, |current_process| {
self.apps
.enter(*current_process, |_, _| current_process == &process_id)
.unwrap_or(true)
});
if match_or_empty_or_nonexistant {
self.owning_process.set(process_id);
} else {
return CommandReturn::failure(ErrorCode::RESERVE);
}
match command_num {
// Check if the sensor is correctly connected
1 => {
if self.state.get() == State::Idle {
match self.is_present() {
Ok(()) => CommandReturn::success(),
Err(error) => CommandReturn::failure(error),
}
} else {
CommandReturn::failure(ErrorCode::BUSY)
}
}
// Set Accelerometer Power Mode
2 => {
if self.state.get() == State::Idle {
if let Some(data_rate) = Lsm303AccelDataRate::from_usize(data1) {
match self.set_power_mode(data_rate, if data2 != 0 { true } else { false })
{
Ok(()) => CommandReturn::success(),
Err(error) => CommandReturn::failure(error),
}
} else {
CommandReturn::failure(ErrorCode::INVAL)
}
} else {
CommandReturn::failure(ErrorCode::BUSY)
}
}
// Set Accelerometer Scale And Resolution
3 => {
if self.state.get() == State::Idle {
if let Some(scale) = Lsm303Scale::from_usize(data1) {
match self
.set_scale_and_resolution(scale, if data2 != 0 { true } else { false })
{
Ok(()) => CommandReturn::success(),
Err(error) => CommandReturn::failure(error),
}
} else {
CommandReturn::failure(ErrorCode::INVAL)
}
} else {
CommandReturn::failure(ErrorCode::BUSY)
}
}
// Set Magnetometer Temperature Enable and Data Rate
4 => {
if self.state.get() == State::Idle {
if let Some(data_rate) = Lsm303MagnetoDataRate::from_usize(data1) {
match self.set_magneto_data_rate(data_rate) {
Ok(()) => CommandReturn::success(),
Err(error) => CommandReturn::failure(error),
}
} else {
CommandReturn::failure(ErrorCode::INVAL)
}
} else {
CommandReturn::failure(ErrorCode::BUSY)
}
}
// Set Magnetometer Range
5 => {
if self.state.get() == State::Idle {
if let Some(range) = Lsm303Range::from_usize(data1) {
match self.set_range(range) {
Ok(()) => CommandReturn::success(),
Err(error) => CommandReturn::failure(error),
}
} else {
CommandReturn::failure(ErrorCode::INVAL)
}
} else {
CommandReturn::failure(ErrorCode::BUSY)
}
}
// default
_ => CommandReturn::failure(ErrorCode::NOSUPPORT),
}
}
fn allocate_grant(&self, processid: ProcessId) -> Result<(), kernel::process::Error> {
self.apps.enter(processid, |_, _| {})
}
}
impl<'a> sensors::NineDof<'a> for Lsm303agrI2C<'a> {
fn set_client(&self, nine_dof_client: &'a dyn sensors::NineDofClient) {
self.nine_dof_client.replace(nine_dof_client);
}
fn read_accelerometer(&self) -> Result<(), ErrorCode> {
self.read_acceleration_xyz()
}
fn read_magnetometer(&self) -> Result<(), ErrorCode> {
self.read_magnetometer_xyz()
}
}
impl<'a> sensors::TemperatureDriver<'a> for Lsm303agrI2C<'a> {
fn set_client(&self, temperature_client: &'a dyn sensors::TemperatureClient) {
self.temperature_client.replace(temperature_client);
}
fn read_temperature(&self) -> Result<(), ErrorCode> {
self.read_temperature()
}
}
|
{
true
}
|
ChatMessage.tsx
|
import React from 'react'
import {View, StyleSheet, Image, ImageSourcePropType, TouchableOpacity} from 'react-native'
import {observer} from 'mobx-react'
import {IMessage, IWocky, IProfile, IFile, MessageStatus} from 'src/wocky'
import {RText, Avatar, Spinner} from '../common'
import Triangle from '../map/Triangle'
import {width} from '../Global'
import {colors} from 'src/constants'
type Props = {
message: IMessage
diffMessage: IMessage | null
wocky?: IWocky
}
const lightPink = 'rgb(255, 228, 231)'
const pink = 'rgb(254, 173, 181)'
const triangleSize = 12
type StatusProps = {
isImage: boolean
status: MessageStatus
send: () => Promise<void>
}
const StatusText = ({isImage, status, send}: StatusProps) => {
if (status === MessageStatus.Sending) {
return <RText style={styles.statusText}>Sending...</RText>
}
if (status === MessageStatus.Error) {
return (
<TouchableOpacity onPress={() => send()}>
<RText style={styles.statusText}>
{isImage ? 'Image not sent.' : 'Message not sent.'} Tap to retry.
</RText>
</TouchableOpacity>
)
}
return null
}
const ChatMessageWrapper = observer(
({message: {isOutgoing, getUpload, content, otherUser, status, send}}: Props) => {
const left = !isOutgoing
// NOTE: since Messages can have both image + text we need to render them as "separate" messages here
return (
<View>
{!!getUpload && (
<ChatMessage left={left} media={getUpload} otherUser={otherUser} status={status} />
)}
{!!content && (
<ChatMessage left={left} text={content} otherUser={otherUser} status={status} />
)}
<StatusText isImage={!!getUpload} status={status} send={send} />
</View>
)
}
)
const ChatMessage = ({
left,
media,
text,
otherUser,
status,
}: {
left: boolean
media?: IFile
text?: string
otherUser: IProfile
status: MessageStatus
}) => {
// TODO: media.loading doesn't work well here...there's a delay before `loading` gets set
|
const color = media && !media.thumbnail ? colors.GREY : left ? lightPink : pink
const triangleStyle = left ? {left: -triangleSize} : {right: -triangleSize}
return (
<View
style={[
styles.rowContainer,
{
justifyContent: left ? 'flex-start' : 'flex-end',
paddingRight: left ? 0 : triangleSize,
},
]}
>
{left && <Avatar size={40} profile={otherUser} style={{marginRight: 10}} tappable={false} />}
<View style={{flex: 1, alignItems: left ? 'flex-start' : 'flex-end'}}>
{!!media ? (
<ImageMessage media={media} left={left} color={color} status={status} />
) : (
<RText
style={[
styles.bubble,
{
backgroundColor: color,
borderColor: color,
},
]}
size={15}
>
{text}
</RText>
)}
<Triangle
width={triangleSize}
height={triangleSize}
color={color}
direction={left ? 'left' : 'right'}
style={{position: 'absolute', bottom: triangleSize, ...triangleStyle}}
/>
</View>
</View>
)
}
const ImageMessage = observer(
({media, color, status}: {media: IFile; left: boolean; color: string; status: MessageStatus}) => {
if (media && !media.thumbnail) {
return (
<View
style={[
styles.bubble,
{
backgroundColor: color,
borderColor: color,
width: 80,
alignItems: 'center',
},
]}
>
<Spinner color="white" />
</View>
)
}
const image = media.thumbnail as ImageSourcePropType
const {width: iWidth, height: iHeight} = Image.resolveAssetSource(image)
const maxDim = width * 0.75
const dimensions =
iWidth / iHeight < 1
? {height: maxDim, width: (maxDim / iHeight) * iWidth}
: {width: maxDim, height: (maxDim / iWidth) * iHeight}
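// Illustrative numbers (assuming a 375pt-wide screen, so maxDim = 281.25):
// a 500x1000 portrait thumbnail scales to about 141x281 and a 1000x500
// landscape one to about 281x141; the longer edge is capped at 75% of screen width.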
return (
<View>
<Image
style={[
styles.bubble,
styles.mediaBubble,
dimensions,
{
backgroundColor: color,
borderColor: color,
},
]}
resizeMode="contain"
source={image as ImageSourcePropType}
/>
{status === MessageStatus.Sending && (
<View style={styles.overlay}>
<Spinner color="white" />
</View>
)}
{status === MessageStatus.Error && (
<View style={styles.overlay}>
<Image source={require('../../../images/uploadError.png')} />
</View>
)}
</View>
)
}
)
export default ChatMessageWrapper
const styles = StyleSheet.create({
overlay: {
position: 'absolute',
alignItems: 'center',
justifyContent: 'center',
left: 0,
right: 0,
top: 0,
bottom: 0,
backgroundColor: 'rgba(255, 255, 255, 0.8)',
},
mediaBubble: {
padding: 0,
},
bubble: {
borderRadius: 8,
padding: 12,
overflow: 'hidden',
borderWidth: 1,
},
rowContainer: {
flexDirection: 'row',
marginBottom: 10,
},
statusText: {
textAlign: 'right',
marginTop: -5,
marginBottom: 3,
fontSize: 13,
color: colors.DARK_GREY,
},
})
| |
main.go
|
package main
import (
"os"
"github.com/appscode/go-seed-cli/cmds"
logs "github.com/appscode/log/golog"
)
func main()
|
{
logs.InitLogs()
defer logs.FlushLogs()
if err := cmds.NewRootCmd(Version).Execute(); err != nil {
os.Exit(1)
}
os.Exit(0)
}
|
|
rest_devicereport.go
|
/*******************************************************************************
* Copyright 2017 Dell Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
* @microservice: core-metadata-go service
* @author: Spencer Bull & Ryan Comer, Dell
* @version: 0.5.0
*******************************************************************************/
package metadata
import (
"encoding/json"
"errors"
"net/http"
"net/url"
"github.com/edgexfoundry/edgex-go/core/domain/models"
"github.com/gorilla/mux"
mgo "gopkg.in/mgo.v2"
)
func restGetAllDeviceReports(w http.ResponseWriter, _ *http.Request) {
res := make([]models.DeviceReport, 0)
err := getAllDeviceReports(&res)
if err != nil {
loggingClient.Error(err.Error(), "")
http.Error(w, err.Error(), http.StatusServiceUnavailable)
return
}
// Check max limit
if len(res) > configuration.ReadMaxLimit {
err = errors.New("Max limit exceeded")
http.Error(w, err.Error(), http.StatusRequestEntityTooLarge)
loggingClient.Error(err.Error(), "")
return
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(&res)
}
// Add a new device report
// Referenced objects (Device, Schedule event) must already exist
// 404 If any of the referenced objects aren't found
func restAddDeviceReport(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
var dr models.DeviceReport
if err := json.NewDecoder(r.Body).Decode(&dr); err != nil {
loggingClient.Error(err.Error(), "")
http.Error(w, err.Error(), http.StatusServiceUnavailable)
return
}
// Check if the device exists
var d models.Device
if err := getDeviceByName(&d, dr.Device); err != nil {
if err == mgo.ErrNotFound {
http.Error(w, "Device referenced by Device Report doesn't exist", http.StatusNotFound)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
loggingClient.Error(err.Error(), "")
return
}
// Check if the Schedule Event exists
var se models.ScheduleEvent
if err := getScheduleEventByName(&se, dr.Event); err != nil {
if err == mgo.ErrNotFound {
http.Error(w, "Schedule Event referenced by Device Report doesn't exist", http.StatusNotFound)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
loggingClient.Error(err.Error(), "")
return
}
// Add the device report
if err := addDeviceReport(&dr); err != nil {
if err == ErrDuplicateName {
http.Error(w, "Duplicate Name for the device report", http.StatusConflict)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
loggingClient.Error(err.Error(), "")
return
}
// Notify associates
if err := notifyDeviceReportAssociates(dr, http.MethodPost); err != nil {
loggingClient.Error(err.Error(), "")
}
w.WriteHeader(http.StatusOK)
w.Write([]byte(dr.Id.Hex()))
}
func restUpdateDeviceReport(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
var from models.DeviceReport
if err := json.NewDecoder(r.Body).Decode(&from); err != nil {
loggingClient.Error(err.Error(), "")
http.Error(w, err.Error(), http.StatusServiceUnavailable)
return
}
// Check if the device report exists
var to models.DeviceReport
// First try ID
if err := getDeviceReportById(&to, from.Id.Hex()); err != nil {
// Try by name
if err = getDeviceReportByName(&to, from.Name); err != nil {
if err == mgo.ErrNotFound {
http.Error(w, err.Error(), http.StatusNotFound)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
loggingClient.Error(err.Error(), "")
return
}
}
if err := updateDeviceReportFields(from, &to, w); err != nil {
loggingClient.Error(err.Error(), "")
return
}
if err := updateDeviceReport(&to); err != nil {
loggingClient.Error(err.Error(), "")
http.Error(w, err.Error(), http.StatusServiceUnavailable)
return
}
// Notify Associates
if err := notifyDeviceReportAssociates(to, http.MethodPut); err != nil {
loggingClient.Error(err.Error(), "")
}
w.WriteHeader(http.StatusOK)
w.Write([]byte("true"))
}
// Update the relevant fields for the device report
func updateDeviceReportFields(from models.DeviceReport, to *models.DeviceReport, w http.ResponseWriter) error {
if from.Device != "" {
to.Device = from.Device
if err := validateDevice(to.Device, w); err != nil {
return err
}
}
if from.Event != "" {
to.Event = from.Event
if err := validateEvent(to.Event, w); err != nil {
return err
}
}
if from.Expected != nil {
to.Expected = from.Expected
// TODO: Someday find a way to check the value descriptors
}
if from.Name != "" {
to.Name = from.Name
}
if from.Origin != 0 {
to.Origin = from.Origin
}
return nil
}
// Validate that the device exists
func validateDevice(d string, w http.ResponseWriter) error {
var device models.Device
if err := getDeviceByName(&device, d); err != nil {
if err == mgo.ErrNotFound {
http.Error(w, "Device was not found", http.StatusNotFound)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
return err
}
return nil
}
// Validate that the schedule event exists
func validateEvent(e string, w http.ResponseWriter) error {
var event models.ScheduleEvent
if err := getScheduleEventByName(&event, e); err != nil {
if err == mgo.ErrNotFound {
http.Error(w, "Event was not found", http.StatusNotFound)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
return err
}
return nil
}
func restGetReportById(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
var did string = vars[ID]
var res models.DeviceReport
err := getDeviceReportById(&res, did)
if err != nil {
if err == mgo.ErrNotFound {
http.Error(w, err.Error(), http.StatusNotFound)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
loggingClient.Error(err.Error(), "")
return
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(res)
}
func restGetReportByName(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
n, err := url.QueryUnescape(vars[NAME])
if err != nil {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
loggingClient.Error(err.Error(), "")
return
}
var res models.DeviceReport
err = getDeviceReportByName(&res, n)
if err != nil {
if err == mgo.ErrNotFound {
http.Error(w, err.Error(), http.StatusNotFound)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
loggingClient.Error(err.Error(), "")
return
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(res)
}
// Get a list of value descriptor names
// The names are a union of all the value descriptors from the device reports for the given device
func restGetValueDescriptorsForDeviceName(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
n, err := url.QueryUnescape(vars[DEVICENAME])
if err != nil {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
loggingClient.Error(err.Error(), "")
return
}
// Get all the associated device reports
var reports []models.DeviceReport
if err = getDeviceReportByDeviceName(&reports, n); err != nil {
loggingClient.Error(err.Error(), "")
http.Error(w, err.Error(), http.StatusServiceUnavailable)
return
}
valueDescriptors := []string{}
for _, report := range reports {
for _, e := range report.Expected {
valueDescriptors = append(valueDescriptors, e)
}
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(valueDescriptors)
}
func restGetDeviceReportByDeviceName(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
n, err := url.QueryUnescape(vars[DEVICENAME])
if err != nil {
loggingClient.Error(err.Error(), "")
http.Error(w, err.Error(), http.StatusServiceUnavailable)
return
}
res := make([]models.DeviceReport, 0)
err = getDeviceReportByDeviceName(&res, n)
if err != nil {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
loggingClient.Error(err.Error(), "")
return
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(res)
}
func restDeleteReportById(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
var id string = vars[ID]
// Check if the device report exists
var dr models.DeviceReport
if err := getDeviceReportById(&dr, id); err != nil {
if err == mgo.ErrNotFound {
http.Error(w, err.Error(), http.StatusNotFound)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
loggingClient.Error(err.Error(), "")
return
}
if err := deleteDeviceReport(dr, w); err != nil {
loggingClient.Error(err.Error(), "")
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
w.Write([]byte("true"))
}
func restDeleteReportByName(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
n, err := url.QueryUnescape(vars[NAME])
if err != nil {
loggingClient.Error(err.Error(), "")
http.Error(w, err.Error(), http.StatusServiceUnavailable)
return
}
// Check if the device report exists
var dr models.DeviceReport
if err = getDeviceReportByName(&dr, n); err != nil {
if err == mgo.ErrNotFound {
http.Error(w, err.Error(), http.StatusNotFound)
} else {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
}
loggingClient.Error(err.Error(), "")
return
}
if err = deleteDeviceReport(dr, w); err != nil {
loggingClient.Error(err.Error(), "")
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
w.Write([]byte("true"))
}
func deleteDeviceReport(dr models.DeviceReport, w http.ResponseWriter) error {
if err := deleteById(DRCOL, dr.Id.Hex()); err != nil {
http.Error(w, err.Error(), http.StatusServiceUnavailable)
return err
}
// Notify Associates
if err := notifyDeviceReportAssociates(dr, http.MethodDelete); err != nil {
return err
}
return nil
}
// Notify the associated device services to the device report
func
|
(dr models.DeviceReport, action string) error {
// Get the device of the report
var d models.Device
if err := getDeviceByName(&d, dr.Device); err != nil {
return err
}
// Get the device service for the device
var ds models.DeviceService
if err := getDeviceServiceById(&ds, d.Service.Service.Id.Hex()); err != nil {
return err
}
var services []models.DeviceService
services = append(services, ds)
// Notify the associating device services
if err := notifyAssociates(services, dr.Id.Hex(), action, models.REPORT); err != nil {
return err
}
return nil
}
|
notifyDeviceReportAssociates
|
updateGlobalUserInGroup.js
|
export const UPDATE_GLOBAL_USER_IN_GROUP_INVALID =
'UPDATE_GLOBAL_USER_IN_GROUP_INVALID';
export const UPDATE_GLOBAL_USER_IN_GROUP_REQUESTING =
'UPDATE_GLOBAL_USER_IN_GROUP_REQUESTING';
export const UPDATE_GLOBAL_USER_IN_GROUP_SUCCESS =
|
'UPDATE_GLOBAL_USER_IN_GROUP_SUCCESS';
export const UPDATE_GLOBAL_USER_IN_GROUP_FAILURE =
'UPDATE_GLOBAL_USER_IN_GROUP_FAILURE';
| |
moodevalyoutube.py
|
#!/usr/bin/python
"""
This code executes a YouTube search request for the specified mood.
It takes in a YouTube API key provided by the developer, fetches the top
results for the mood, and returns their URLs so a random one can be picked.
"""
import argparse
import logging
import json
import random
from pprint import pprint
#Importing all the required libraries
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
# Makes the YouTube request and parses it
DEVELOPER_KEY = 'AIzaSyCEGv-JQcjMIdpXYO9eIhAGWKLJn1XzXos' # Developer API key
YOUTUBE_API_SERVICE_NAME = 'youtube' # API service name
YOUTUBE_API_VERSION = 'v3' #API version
def
|
(keyword, limit):
"""
This function finds the videos which correlate with the specified mood
and returns the list of the URLs of the videos.
Args:
keyword(str): The mood
limit (int): The number to be returned.
Returns:
A list of URL code.
"""
youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
developerKey = DEVELOPER_KEY)
# Call the search.list method to retrieve results matching the specified
# query term.
# Calls the search response to search for the list from Youtube
search_response = youtube.search().list(q=keyword, part='id,snippet',
maxResults=limit)
search_response = search_response.execute()
# Create an empty list of videos
videos = []
# Add each result to the appropriate list, and then display the lists of
# matching videos.
for search_result in search_response.get('items', []):
if search_result['id']['kind'] == 'youtube#video':
video_id = search_result['id']['videoId']
video_url = "https://www.youtube.com/watch?v={0}".format(video_id)
videos.append(video_url)
return videos
if __name__ == '__main__':
try:
print(mood_eval_youtube("happy", 50))
except HttpError as e:
print ('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))
|
mood_eval_youtube
|
checks.py
|
from typing import List
from django.apps.config import AppConfig
from django.core.checks import CheckMessage, Critical, Tags, register
@register(Tags.compatibility)
def check_USPS_api_auth(app_configs: AppConfig = None, **kwargs) -> List[CheckMessage]:
"""
check_USPS_api_auth:
Checks if the user has supplied a USPS username/password.
Args:
appconfig (AppConfig, optional): Defaults to None.
|
from . import settings as tax_settings
messages = []
if not tax_settings.USPS_USER:
msg = "Could not find a USPS User."
hint = "Add TAXTEA_USPS_USER to your settings."
messages.append(Critical(msg, hint=hint, id="tax.C001"))
return messages
@register(Tags.compatibility)
def check_Avalara_api_auth(
app_configs: AppConfig = None, **kwargs
) -> List[CheckMessage]:
"""
check_Avalara_api_auth:
Checks if the user has supplied an Avalara username/password.
Args:
appconfig (AppConfig, optional): Defaults to None.
Returns:
List[checks.CheckMessage]: List of Django CheckMessages
"""
from . import settings as tax_settings
messages = []
if not tax_settings.AVALARA_USER:
msg = "Could not find a Avalara User."
hint = "Add TAXTEA_AVALARA_USER to your settings."
messages.append(Critical(msg, hint=hint, id="tax.C002"))
if not tax_settings.AVALARA_PASSWORD:
msg = "Could not find a Avalara Password."
hint = "Add TAXTEA_AVALARA_PASSWORD to your settings."
messages.append(Critical(msg, hint=hint, id="tax.C003"))
return messages
@register(Tags.compatibility)
def check_origin_zips(app_configs: AppConfig = None, **kwargs) -> List[CheckMessage]:
"""
check_origin_zips:
Checks if the user has supplied at least one origin zip.
Args:
appconfig (AppConfig, optional): Defaults to None.
Returns:
List[checks.CheckMessage]: List of Django CheckMessages
"""
from . import settings as tax_settings
messages = []
if not tax_settings.NEXUSES:
msg = "Could not find a Nexus."
hint = "Add at least one TAXTEA_NEXUSES to your settings."
messages.append(Critical(msg, hint=hint, id="tax.C004"))
# Return early: with no TAXTEA_NEXUSES configured, the NEXUSES[0] lookup below would raise an IndexError
return messages
state, zip_code = tax_settings.NEXUSES[0]
if not state and not zip_code:
msg = "Could not find a valid Nexus tuple."
hint = "Add at least one Nexus tuple ('STATE', 'ZIPCODE') to your settings."
messages.append(Critical(msg, hint=hint, id="tax.C005"))
return messages
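# Illustrative settings sketch (setting names taken from the hints above; the
# values are placeholders, not real credentials):
#
# TAXTEA_USPS_USER = "usps-username"
# TAXTEA_AVALARA_USER = "avalara-username"
# TAXTEA_AVALARA_PASSWORD = "avalara-password"
# TAXTEA_NEXUSES = [("NY", "10001")]  # at least one ("STATE", "ZIPCODE") tuple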
|
Returns:
List[checks.CheckMessage]: List of Django CheckMessages
"""
|
a.js
|
import React from "react";
function
|
(props) {
return (
<section className="text-gray-600 body-font dark:text-gray-400 dark:bg-gray-900">
<div className="container px-5 py-24 mx-auto">
<h1 className="text-3xl font-medium title-font text-gray-900 mb-12 text-center dark:text-white">
Testimonials
</h1>
<div className="flex flex-wrap -m-4">
<div className="p-4 md:w-1/2 w-full">
<div className="h-full bg-gray-100 p-8 rounded dark:bg-gray-800">
<svg
xmlns="http://www.w3.org/2000/svg"
fill="currentColor"
className="block w-5 h-5 text-gray-400 mb-4 dark:text-gray-500"
viewBox="0 0 975.036 975.036"
>
<path d="M925.036 57.197h-304c-27.6 0-50 22.4-50 50v304c0 27.601 22.4 50 50 50h145.5c-1.9 79.601-20.4 143.3-55.4 191.2-27.6 37.8-69.399 69.1-125.3 93.8-25.7 11.3-36.8 41.7-24.8 67.101l36 76c11.6 24.399 40.3 35.1 65.1 24.399 66.2-28.6 122.101-64.8 167.7-108.8 55.601-53.7 93.7-114.3 114.3-181.9 20.601-67.6 30.9-159.8 30.9-276.8v-239c0-27.599-22.401-50-50-50zM106.036 913.497c65.4-28.5 121-64.699 166.9-108.6 56.1-53.7 94.4-114.1 115-181.2 20.6-67.1 30.899-159.6 30.899-277.5v-239c0-27.6-22.399-50-50-50h-304c-27.6 0-50 22.4-50 50v304c0 27.601 22.4 50 50 50h145.5c-1.9 79.601-20.4 143.3-55.4 191.2-27.6 37.8-69.4 69.1-125.3 93.8-25.7 11.3-36.8 41.7-24.8 67.101l35.9 75.8c11.601 24.399 40.501 35.2 65.301 24.399z" />
</svg>
<p className="leading-relaxed mb-6">
Synth chartreuse iPhone lomo cray raw denim brunch everyday
carry neutra before they sold out fixie 90's microdosing.
Tacos pinterest fanny pack venmo, post-ironic heirloom try-hard
pabst authentic iceland.
</p>
<a className="inline-flex items-center">
<img alt="testimonial" src="https://dummyimage.com/106x106" className="w-12 h-12 rounded-full flex-shrink-0 object-cover object-center" />
<span className="flex-grow flex flex-col pl-4">
<span className="title-font font-medium text-gray-900 dark:text-white">
Holden Caulfield
</span>
<span className="text-gray-500 text-sm">UI DEVELOPER</span>
</span>
</a>
</div>
</div>
<div className="p-4 md:w-1/2 w-full">
<div className="h-full bg-gray-100 p-8 rounded dark:bg-gray-800">
<svg
xmlns="http://www.w3.org/2000/svg"
fill="currentColor"
className="block w-5 h-5 text-gray-400 mb-4 dark:text-gray-500"
viewBox="0 0 975.036 975.036"
>
<path d="M925.036 57.197h-304c-27.6 0-50 22.4-50 50v304c0 27.601 22.4 50 50 50h145.5c-1.9 79.601-20.4 143.3-55.4 191.2-27.6 37.8-69.399 69.1-125.3 93.8-25.7 11.3-36.8 41.7-24.8 67.101l36 76c11.6 24.399 40.3 35.1 65.1 24.399 66.2-28.6 122.101-64.8 167.7-108.8 55.601-53.7 93.7-114.3 114.3-181.9 20.601-67.6 30.9-159.8 30.9-276.8v-239c0-27.599-22.401-50-50-50zM106.036 913.497c65.4-28.5 121-64.699 166.9-108.6 56.1-53.7 94.4-114.1 115-181.2 20.6-67.1 30.899-159.6 30.899-277.5v-239c0-27.6-22.399-50-50-50h-304c-27.6 0-50 22.4-50 50v304c0 27.601 22.4 50 50 50h145.5c-1.9 79.601-20.4 143.3-55.4 191.2-27.6 37.8-69.4 69.1-125.3 93.8-25.7 11.3-36.8 41.7-24.8 67.101l35.9 75.8c11.601 24.399 40.501 35.2 65.301 24.399z" />
</svg>
<p className="leading-relaxed mb-6">
Synth chartreuse iPhone lomo cray raw denim brunch everyday
carry neutra before they sold out fixie 90's microdosing.
Tacos pinterest fanny pack venmo, post-ironic heirloom try-hard
pabst authentic iceland.
</p>
<a className="inline-flex items-center">
<img alt="testimonial" src="https://dummyimage.com/107x107" className="w-12 h-12 rounded-full flex-shrink-0 object-cover object-center" />
<span className="flex-grow flex flex-col pl-4">
<span className="title-font font-medium text-gray-900 dark:text-white">
Alper Kamu
</span>
<span className="text-gray-500 text-sm">DESIGNER</span>
</span>
</a>
</div>
</div>
</div>
</div>
</section>
);
}
export default TestimonialA;
|
TestimonialA
|
lodashMini.d.ts
|
/**
* lodashMini.ts
*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*
* Imports a subset of lodash library needed for ReactXP's implementation.
*/
import clone = require('lodash/clone');
|
import isEqual = require('lodash/isEqual');
import pull = require('lodash/pull');
import sortBy = require('lodash/sortBy');
export interface Dictionary<T> {
[index: string]: T;
}
export { clone, compact, filter, isEqual, pull, sortBy };
|
import compact = require('lodash/compact');
import filter = require('lodash/filter');
|
main.rs
|
use bytes::{Buf, BufMut, BytesMut, IntoBuf};
use std::fmt;
use std::net::UdpSocket;
#[derive(Default)]
struct DNSResponse {
id: u16,
qr: bool,
opcode: u8,
aa: bool,
tc: bool,
rd: bool,
ra: bool,
z: u8,
rcode: u8,
ancount: u16,
nscount: u16,
arcount: u16,
}
impl DNSResponse {
fn new() -> DNSResponse {
DNSResponse {
id: rand::random::<u16>(),
qr: false,
opcode: 0,
aa: false,
tc: false,
rd: true,
ra: false,
z: 0,
rcode: 0,
ancount: 0,
nscount: 0,
arcount: 0,
..Default::default()
}
}
fn from_buffer(&mut self, buf: &mut dyn Buf) {
self.id = buf.get_u16_le();
let mut byte = buf.get_u8();
self.qr = if byte & 0b10000000 > 0 { true } else { false };
self.opcode = byte & 0b01111000;
self.aa = if byte & 0b00000100 > 0 { true } else { false };
self.tc = if byte & 0b00000010 > 0 { true } else { false };
self.rd = if byte & 0b00000001 > 0 { true } else { false };
byte = buf.get_u8();
self.ra = if byte & 0b10000000 > 0 { true } else { false };
self.z = byte & 0b01110000;
self.rcode = byte & 0b00001111;
}
}
#[derive(Default)]
struct DNSRequest<'a> {
id: u16,
qr: bool,
opcode: u8,
aa: bool,
tc: bool,
rd: bool,
ra: bool,
z: u8,
rcode: u8,
ancount: u16,
nscount: u16,
arcount: u16,
names: Vec<Vec<&'a str>>,
}
impl<'a> DNSRequest<'a> {
fn new() -> DNSRequest<'a> {
DNSRequest {
id: rand::random::<u16>(),
qr: false,
opcode: 0,
aa: false,
tc: false,
rd: true,
ra: false,
z: 0,
rcode: 0,
ancount: 0,
nscount: 0,
arcount: 0,
..Default::default()
}
}
fn to_buffer(&self) -> BytesMut {
let mut buf = BytesMut::with_capacity(1024);
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | ID . |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | QDCOUNT . |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | ANCOUNT . |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | NSCOUNT . |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | ARCOUNT . |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// 16 bits - id
buf.put_u16_le(self.id);
// 8 bits
// QR - 1 bit
// Opcode - 4 bits
// AA - 1 bit
// TC - 1 bit
// RD - 1 bit
let mut bt: u8 = self.opcode;
bt <<= 3;
if self.qr == true {
bt ^= 0b10000000;
}
if self.aa == true {
bt ^= 0b00000100;
}
if self.tc == true {
bt ^= 0b00000010;
}
if self.rd == true {
bt ^= 0b00000001;
}
buf.put_u8(bt);
// 8 bits
// RA - 1 bit
// Z - 3 bits
// Rcode - 4 bits
bt = self.z;
bt <<= 4;
if self.ra == true {
bt ^= 0b10000000;
}
bt ^= self.rcode & 0b00001111;
buf.put_u8(bt);
// 16 bits (QDCOUNT)
buf.put_u16_be(self.qdcount());
// 16 bits (ANCOUNT)
buf.put_u16_be(self.ancount);
// 16 bits (NSCOUNT)
buf.put_u16_be(self.nscount);
// 16 bits (ARCOUNT)
buf.put_u16_be(self.arcount);
// Names
for name in &self.names {
for part in name {
buf.put_u8(part.len() as u8);
for c in part.chars() {
buf.put_u8(c as u8);
}
}
buf.put_u8(0); // End of name
// QTYPE (Type A Query - host address)
buf.put_u8(0);
buf.put_u8(1);
// QCLASS (Class IN - internet address)
buf.put_u8(0);
buf.put_u8(1);
}
buf
}
fn add_question(&mut self, name: &'a String) {
// TODO: Validate name (only dots and numalpha?)
|
fn qdcount(&self) -> u16 {
self.names.len() as u16
}
}
fn get_bits(num: u16, count: usize) -> String {
let mut mask = 1;
let mut counter = count;
loop {
counter -= 1;
if counter == 0 {
break;
}
mask <<= 1;
mask |= 1;
}
let opcode = num & mask;
let binary_string = format!("{:016b}", opcode);
binary_string.chars().skip(16 - count).collect()
}
impl fmt::Debug for DNSResponse {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "--- Begin of packet ---");
writeln!(f, "id:\t{}", self.id);
writeln!(f, "qr:\t{}", self.qr);
writeln!(f, "opcode:\t{}", get_bits(self.opcode as u16, 4));
writeln!(f, "aa:\t{}", self.aa);
writeln!(f, "tc:\t{}", self.tc);
writeln!(f, "rd:\t{}", self.rd);
writeln!(f, "ra:\t{}", self.ra);
writeln!(f, "z:\t{}", get_bits(self.z as u16, 3));
writeln!(f, "rcode:\t{}", get_bits(self.rcode as u16, 4));
writeln!(f, "--");
//writeln!(f, "qdcount:\t{}", get_bits(self.qdcount(), 16));
writeln!(f, "ancount:\t{}", get_bits(self.ancount, 16));
writeln!(f, "nscount:\t{}", get_bits(self.nscount, 16));
writeln!(f, "arcount:\t{}", get_bits(self.arcount, 16));
writeln!(f, "--");
let mut name_count = 1;
/*
for n in &self.names {
writeln!(f, "name #{}:", name_count);
name_count += 1;
for part in n {
writeln!(f, "(len: {})\t{}", part.len(), part);
}
}
*/
writeln!(f, "--- End of packet ---")
}
}
impl<'a> fmt::Debug for DNSRequest<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "--- Begin of packet ---");
writeln!(f, "id:\t{}", self.id);
writeln!(f, "qr:\t{}", self.qr);
writeln!(f, "opcode:\t{}", get_bits(self.opcode as u16, 4));
writeln!(f, "aa:\t{}", self.aa);
writeln!(f, "tc:\t{}", self.tc);
writeln!(f, "rd:\t{}", self.rd);
writeln!(f, "ra:\t{}", self.ra);
writeln!(f, "z:\t{}", get_bits(self.z as u16, 3));
writeln!(f, "rcode:\t{}", get_bits(self.rcode as u16, 4));
writeln!(f, "--");
writeln!(f, "qdcount:\t{}", get_bits(self.qdcount(), 16));
writeln!(f, "ancount:\t{}", get_bits(self.ancount, 16));
writeln!(f, "nscount:\t{}", get_bits(self.nscount, 16));
writeln!(f, "arcount:\t{}", get_bits(self.arcount, 16));
writeln!(f, "--");
let mut name_count = 1;
for n in &self.names {
writeln!(f, "name #{}:", name_count);
name_count += 1;
for part in n {
writeln!(f, "(len: {})\t{}", part.len(), part);
}
}
writeln!(f, "--- End of packet ---")
}
}
fn dump_buffer(buf: &bytes::BytesMut) {
println!("Binary packet representation:");
let mut space_counter = 0;
let mut group_counter = 0;
for i in buf.iter() {
print!("{:02x}", i);
space_counter += 1;
if (space_counter % 2) == 0 {
print!(" ");
group_counter += 1;
if (group_counter % 8) == 0 {
println!();
}
}
}
println!();
}
fn main() {
let mut req = DNSRequest::new();
let name01 = String::from("www.wp.pl");
let name02 = String::from("www.vatican.va");
req.add_question(&name01);
req.add_question(&name02);
println!("{:?}", req);
let binary_representation = req.to_buffer();
dump_buffer(&binary_representation);
let socket = UdpSocket::bind("0.0.0.0:0").expect("Couldn't bind to address");
socket
.send_to(&binary_representation[..], ("8.8.8.8", 53))
.expect("Couldn't send DNS request");
let mut buf = [0; 2048];
let (amt, _) = socket
.recv_from(&mut buf)
.expect("Couldn't receive response");
println!();
println!("Received {} bytes of response\n", amt);
let mut bb = BytesMut::with_capacity(amt);
for x in 0..amt {
bb.put_u8(buf[x]);
}
dump_buffer(&bb);
let mut xxx = buf.into_buf();
let mut resp = DNSResponse::new();
resp.from_buffer(&mut xxx);
println!();
println!("{:?}", resp);
}
|
let parts: Vec<_> = name.split('.').map(|x| x).collect();
self.names.push(parts);
}
|
bare_emoji_test.go
|
package testers
import (
"strconv"
"testing"
"github.com/binhonglee/kdlgo"
)
func TestBAREEMOJI(t *testing.T)
|
{
objs, err := kdlgo.ParseFile("../kdls/bare_emoji.kdl")
if err != nil {
t.Fatal(err)
}
expected := []string{
`😁 "happy!"`,
}
if len(objs.GetValue().Objects) != len(expected) {
t.Fatal(
"There should be " + strconv.Itoa(len(expected)) +
" KDLObjects. Got " + strconv.Itoa(len(objs.GetValue().Objects)) + " instead.",
)
}
for i, obj := range objs.GetValue().Objects {
s, err := kdlgo.RecreateKDLObj(obj)
if err != nil {
t.Fatal(err)
return
}
if s != expected[i] {
t.Error(
"Item number "+strconv.Itoa(i+1)+" is incorrectly parsed.\n",
"Expected: '"+expected[i]+"' but got '"+s+"' instead",
)
}
}
}
|
|
user.entity.ts
|
import { Role } from 'src/shared/enums';
import { Column, Entity } from 'typeorm';
import { BaseEntity } from './base';
@Entity('User')
export class
|
extends BaseEntity {
@Column({ length: 25, unique: true })
userName:string;
@Column()
firstName:string;
@Column()
lastName:string;
@Column({unique: true})
email:string;
@Column()
passwordHash:string;
@Column({default:Role.User})
role: Role
@Column({default:true})
status: boolean
@Column({default:false})
confirmedEmail:boolean;
}
|
UserEntity
|
lex_test.go
|
package js // import "github.com/tdewolff/parse/js"
import (
"bytes"
"fmt"
"io"
"strconv"
"testing"
"github.com/tdewolff/test"
)
func helperStringify(t *testing.T, input string) string
|
////////////////////////////////////////////////////////////////
type TTs []TokenType
func TestTokens(t *testing.T) {
var tokenTests = []struct {
js string
expected []TokenType
}{
{" \t\v\f\u00A0\uFEFF\u2000", TTs{}}, // WhitespaceToken
{"\n\r\r\n\u2028\u2029", TTs{LineTerminatorToken}},
{"5.2 .04 0x0F 5e99", TTs{NumericToken, NumericToken, NumericToken, NumericToken}},
{"a = 'string'", TTs{IdentifierToken, PunctuatorToken, StringToken}},
{"/*comment*/ //comment", TTs{CommentToken, CommentToken}},
{"{ } ( ) [ ]", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{". ; , < > <=", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{">= == != === !==", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{"+ - * % ++ --", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{"<< >> >>> & | ^", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{"! ~ && || ? :", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{"= += -= *= %= <<=", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{">>= >>>= &= |= ^= =>", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{"a = /.*/g;", TTs{IdentifierToken, PunctuatorToken, RegexpToken, PunctuatorToken}},
{"/*co\nm\u2028m/*ent*/ //co//mment\u2029//comment", TTs{CommentToken, CommentToken, LineTerminatorToken, CommentToken}},
{"$ _\u200C \\u2000 \u200C", TTs{IdentifierToken, IdentifierToken, IdentifierToken, UnknownToken}},
{">>>=>>>>=", TTs{PunctuatorToken, PunctuatorToken, PunctuatorToken}},
{"/", TTs{PunctuatorToken}},
{"/=", TTs{PunctuatorToken}},
{"010xF", TTs{NumericToken, NumericToken, IdentifierToken}},
{"50e+-0", TTs{NumericToken, IdentifierToken, PunctuatorToken, PunctuatorToken, NumericToken}},
{"'str\\i\\'ng'", TTs{StringToken}},
{"'str\\\\'abc", TTs{StringToken, IdentifierToken}},
{"'str\\\ni\\\\u00A0ng'", TTs{StringToken}},
{"a = /[a-z/]/g", TTs{IdentifierToken, PunctuatorToken, RegexpToken}},
{"a=/=/g1", TTs{IdentifierToken, PunctuatorToken, RegexpToken}},
{"a = /'\\\\/\n", TTs{IdentifierToken, PunctuatorToken, RegexpToken, LineTerminatorToken}},
{"a=/\\//g1", TTs{IdentifierToken, PunctuatorToken, RegexpToken}},
{"new RegExp(a + /\\d{1,2}/.source)", TTs{IdentifierToken, IdentifierToken, PunctuatorToken, IdentifierToken, PunctuatorToken, RegexpToken, PunctuatorToken, IdentifierToken, PunctuatorToken}},
{"0b0101 0o0707 0b17", TTs{NumericToken, NumericToken, NumericToken, NumericToken}},
{"`template`", TTs{TemplateToken}},
{"`a${x+y}b`", TTs{TemplateToken, IdentifierToken, PunctuatorToken, IdentifierToken, TemplateToken}},
{"`temp\nlate`", TTs{TemplateToken}},
// early endings
{"'string", TTs{StringToken}},
{"'\n '\u2028", TTs{UnknownToken, LineTerminatorToken, UnknownToken, LineTerminatorToken}},
{"'str\\\U00100000ing\\0'", TTs{StringToken}},
{"'strin\\00g'", TTs{StringToken}},
{"/*comment", TTs{CommentToken}},
{"a=/regexp", TTs{IdentifierToken, PunctuatorToken, RegexpToken}},
{"\\u002", TTs{UnknownToken, IdentifierToken}},
// coverage
{"Ø a〉", TTs{IdentifierToken, IdentifierToken, UnknownToken}},
{"0xg 0.f", TTs{NumericToken, IdentifierToken, NumericToken, PunctuatorToken, IdentifierToken}},
{"0bg 0og", TTs{NumericToken, IdentifierToken, NumericToken, IdentifierToken}},
{"\u00A0\uFEFF\u2000", TTs{}},
{"\u2028\u2029", TTs{LineTerminatorToken}},
{"\\u0029ident", TTs{IdentifierToken}},
{"\\u{0029FEF}ident", TTs{IdentifierToken}},
{"\\u{}", TTs{UnknownToken, IdentifierToken, PunctuatorToken, PunctuatorToken}},
{"\\ugident", TTs{UnknownToken, IdentifierToken}},
{"'str\u2028ing'", TTs{UnknownToken, IdentifierToken, LineTerminatorToken, IdentifierToken, StringToken}},
{"a=/\\\n", TTs{IdentifierToken, PunctuatorToken, PunctuatorToken, UnknownToken, LineTerminatorToken}},
{"a=/x/\u200C\u3009", TTs{IdentifierToken, PunctuatorToken, RegexpToken, UnknownToken}},
{"a=/x\n", TTs{IdentifierToken, PunctuatorToken, PunctuatorToken, IdentifierToken, LineTerminatorToken}},
// go fuzz
{"`", TTs{UnknownToken}},
}
for _, tt := range tokenTests {
stringify := helperStringify(t, tt.js)
l := NewLexer(bytes.NewBufferString(tt.js))
i := 0
for {
token, _ := l.Next()
if token == ErrorToken {
test.That(t, i == len(tt.expected), "when error occurred we must be at the end in "+stringify)
test.Error(t, l.Err(), io.EOF, "in "+stringify)
break
} else if token == WhitespaceToken {
continue
}
test.That(t, i < len(tt.expected), "index", i, "must not exceed expected token types size", len(tt.expected), "in "+stringify)
if i < len(tt.expected) {
test.That(t, token == tt.expected[i], "token types must match at index "+strconv.Itoa(i)+" in "+stringify)
}
i++
}
}
test.String(t, WhitespaceToken.String(), "Whitespace")
test.String(t, TokenType(100).String(), "Invalid(100)")
}
////////////////////////////////////////////////////////////////
func ExampleNewLexer() {
l := NewLexer(bytes.NewBufferString("var x = 'lorem ipsum';"))
out := ""
for {
tt, data := l.Next()
if tt == ErrorToken {
break
}
out += string(data)
l.Free(len(data))
}
fmt.Println(out)
// Output: var x = 'lorem ipsum';
}
|
{
s := ""
l := NewLexer(bytes.NewBufferString(input))
for i := 0; i < 10; i++ {
tt, data := l.Next()
if tt == ErrorToken {
if l.Err() != nil {
s += tt.String() + "('" + l.Err().Error() + "')"
} else {
s += tt.String() + "(nil)"
}
break
} else if tt == WhitespaceToken {
continue
} else {
s += tt.String() + "('" + string(data) + "') "
}
}
return s
}
|
normalizer.go
|
/*
Copyright 2019 The Vitess Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sqlparser
import (
"fmt"
"vitess.io/vitess/go/sqltypes"
querypb "vitess.io/vitess/go/vt/proto/query"
)
// Normalize changes the statement to use bind values, and
// updates the bind vars to those values. The supplied prefix
// is used to generate the bind var names. The function ensures
// that there are no collisions with existing bind vars.
// Within Select constructs, bind vars are deduped. This allows
// us to identify vindex equality. Otherwise, every value is
// treated as distinct.
func Normalize(stmt Statement, bindVars map[string]*querypb.BindVariable, prefix string) {
nz := newNormalizer(stmt, bindVars, prefix)
_ = Walk(nz.WalkStatement, stmt)
}
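// Illustrative sketch of the effect (assuming prefix "bv" and this package's
// Parse/String helpers):
//
//	stmt, _ := Parse("select * from t where id = 1 and name = 'abc'")
//	bindVars := map[string]*querypb.BindVariable{}
//	Normalize(stmt, bindVars, "bv")
//	// String(stmt) == "select * from t where id = :bv1 and name = :bv2"
//	// bindVars: bv1 -> INT64(1), bv2 -> VARBINARY("abc")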
type normalizer struct {
stmt Statement
bindVars map[string]*querypb.BindVariable
prefix string
reserved map[string]struct{}
counter int
vals map[string]string
}
func newNormalizer(stmt Statement, bindVars map[string]*querypb.BindVariable, prefix string) *normalizer {
return &normalizer{
stmt: stmt,
bindVars: bindVars,
prefix: prefix,
reserved: GetBindvars(stmt),
counter: 1,
vals: make(map[string]string),
}
}
// WalkStatement is the top level walk function.
// If it encounters a Select, it switches to a mode
// where variables are deduped.
func (nz *normalizer) WalkStatement(node SQLNode) (bool, error) {
switch node := node.(type) {
|
// Don't continue
return false, nil
case *SQLVal:
nz.convertSQLVal(node)
case *ComparisonExpr:
nz.convertComparison(node)
case *ColName, TableName:
// Common node types that never contain SQLVals or ListArgs but create a lot of object
// allocations.
return false, nil
}
return true, nil
}
// WalkSelect normalizes the AST in Select mode.
func (nz *normalizer) WalkSelect(node SQLNode) (bool, error) {
switch node := node.(type) {
case *SQLVal:
nz.convertSQLValDedup(node)
case *ComparisonExpr:
nz.convertComparison(node)
case *ColName, TableName:
// Common node types that never contain SQLVals or ListArgs but create a lot of object
// allocations.
return false, nil
case OrderBy, GroupBy:
// do not make a bind var for order by column_position
return false, nil
}
return true, nil
}
func (nz *normalizer) convertSQLValDedup(node *SQLVal) {
// If value is too long, don't dedup.
// Such values are most likely not for vindexes.
// We save a lot of CPU because we avoid building
// the key for them.
if len(node.Val) > 256 {
nz.convertSQLVal(node)
return
}
// Make the bindvar
bval := nz.sqlToBindvar(node)
if bval == nil {
return
}
// Check if there's a bindvar for that value already.
var key string
if bval.Type == sqltypes.VarBinary {
// Prefixing strings with "'" ensures that a string
// and number that have the same representation don't
// collide.
key = "'" + string(node.Val)
} else {
key = string(node.Val)
}
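// e.g. the integer literal 123 produces key "123" while the string literal
// '123' produces key "'123", so the two values never share a bind variable.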
bvname, ok := nz.vals[key]
if !ok {
// If there's no such bindvar, make a new one.
bvname = nz.newName()
nz.vals[key] = bvname
nz.bindVars[bvname] = bval
}
// Modify the AST node to a bindvar.
node.Type = ValArg
node.Val = append([]byte(":"), bvname...)
}
// convertSQLVal converts an SQLVal without the dedup.
func (nz *normalizer) convertSQLVal(node *SQLVal) {
bval := nz.sqlToBindvar(node)
if bval == nil {
return
}
bvname := nz.newName()
nz.bindVars[bvname] = bval
node.Type = ValArg
node.Val = append([]byte(":"), bvname...)
}
// convertComparison attempts to convert IN clauses to
// use the list bind var construct. If it fails, it returns
// with no change made. The walk function will then continue
// and iterate on converting each individual value into separate
// bind vars.
func (nz *normalizer) convertComparison(node *ComparisonExpr) {
if node.Operator != InStr && node.Operator != NotInStr {
return
}
tupleVals, ok := node.Right.(ValTuple)
if !ok {
return
}
// The RHS is a tuple of values.
// Make a list bindvar.
bvals := &querypb.BindVariable{
Type: querypb.Type_TUPLE,
}
for _, val := range tupleVals {
bval := nz.sqlToBindvar(val)
if bval == nil {
return
}
bvals.Values = append(bvals.Values, &querypb.Value{
Type: bval.Type,
Value: bval.Value,
})
}
bvname := nz.newName()
nz.bindVars[bvname] = bvals
// Modify RHS to be a list bindvar.
node.Right = ListArg(append([]byte("::"), bvname...))
}
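// Illustrative sketch (prefix "bv" assumed): "id in (1, 2, 3)" becomes
// "id in ::bv1" with bindVars["bv1"] holding a TUPLE of the three values;
// if any element is not a plain literal, the clause is left as is and the
// walker falls back to normalizing each value separately.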
func (nz *normalizer) sqlToBindvar(node SQLNode) *querypb.BindVariable {
if node, ok := node.(*SQLVal); ok {
var v sqltypes.Value
var err error
switch node.Type {
case StrVal:
v, err = sqltypes.NewValue(sqltypes.VarBinary, node.Val)
case IntVal:
v, err = sqltypes.NewValue(sqltypes.Int64, node.Val)
case FloatVal:
v, err = sqltypes.NewValue(sqltypes.Float64, node.Val)
default:
return nil
}
if err != nil {
return nil
}
return sqltypes.ValueBindVariable(v)
}
return nil
}
func (nz *normalizer) newName() string {
for {
newName := fmt.Sprintf("%s%d", nz.prefix, nz.counter)
if _, ok := nz.reserved[newName]; !ok {
nz.reserved[newName] = struct{}{}
return newName
}
nz.counter++
}
}
// GetBindvars returns a map of the bind vars referenced in the statement.
// TODO(sougou); This function gets called again from vtgate/planbuilder.
// Ideally, this should be done only once.
func GetBindvars(stmt Statement) map[string]struct{} {
bindvars := make(map[string]struct{})
_ = Walk(func(node SQLNode) (kontinue bool, err error) {
switch node := node.(type) {
case *ColName, TableName:
// Common node types that never contain SQLVals or ListArgs but create a lot of object
// allocations.
return false, nil
case *SQLVal:
if node.Type == ValArg {
bindvars[string(node.Val[1:])] = struct{}{}
}
case ListArg:
bindvars[string(node[2:])] = struct{}{}
}
return true, nil
}, stmt)
return bindvars
}
|
case *Select:
_ = Walk(nz.WalkSelect, node)
|
day09.rs
|
use advent_of_code_2021::util::lines;
use std::collections::{HashSet, VecDeque};
type Grid = Vec<Vec<u8>>;
fn adj(grid: &[Vec<u8>], row: usize, col: usize) -> Vec<(usize, usize)> {
let (total_rows, total_cols) = (grid.len(), grid[0].len());
let mut adj = Vec::with_capacity(4);
if row > 0 {
adj.push((row - 1, col));
}
if row < total_rows - 1 {
adj.push((row + 1, col));
}
if col > 0 {
adj.push((row, col - 1));
}
if col < total_cols - 1 {
adj.push((row, col + 1));
}
adj
}
fn basin(grid: &[Vec<u8>], row: usize, col: usize) -> usize {
let mut seen = HashSet::new();
let mut frontier = VecDeque::new();
frontier.push_back((row, col));
seen.insert((row, col));
while !frontier.is_empty() {
let (row, col) = frontier.pop_front().unwrap();
seen.insert((row, col));
let this = grid[row][col];
for (i, j) in adj(grid, row, col) {
let next = grid[i][j];
if !seen.contains(&(i, j)) && next < 9 && this < next {
frontier.push_back((i, j));
}
}
}
seen.len()
}
fn main() {
let grid: Grid = lines()
.into_iter()
.map(|line| line.chars().into_iter().map(|c| c as u8 - b'0').collect())
.collect();
let (rows, cols) = (grid.len(), grid[0].len());
let minimums = (0..rows)
|
adj(&grid, *row, *col)
.into_iter()
.all(|(i, j)| grid[i][j] > cell)
})
.collect::<Vec<(usize, usize)>>();
let part1 = minimums
.iter()
.map(|(row, col)| grid[*row][*col] as usize + 1)
.sum::<usize>();
println!("{}", part1);
let mut basin_sizes = minimums
.iter()
.map(|(row, col)| basin(&grid, *row, *col))
.collect::<Vec<_>>();
basin_sizes.sort_unstable();
let part2 = basin_sizes.iter().rev().take(3).product::<usize>();
println!("{}", part2);
}
|
.into_iter()
.flat_map(|row| (0..cols).into_iter().map(move |col| (row, col)))
.filter(|(row, col)| {
let cell = grid[*row][*col];
|
conflict_test.go
|
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package merge_test
import (
"testing"
"sigs.k8s.io/structured-merge-diff/v3/fieldpath"
"sigs.k8s.io/structured-merge-diff/v3/merge"
"sigs.k8s.io/structured-merge-diff/v3/value"
)
var (
// Short names for readable test cases.
_NS = fieldpath.NewSet
_P = fieldpath.MakePathOrDie
_KBF = fieldpath.KeyByFields
_V = value.NewValueInterface
)
func TestNewFromSets(t *testing.T) {
got := merge.ConflictsFromManagers(fieldpath.ManagedFields{
"Bob": fieldpath.NewVersionedSet(
_NS(
_P("key"),
_P("list", _KBF("key", "a", "id", 2), "id"),
),
"v1",
false,
),
"Alice": fieldpath.NewVersionedSet(
_NS(
_P("value"),
_P("list", _KBF("key", "a", "id", 2), "key"),
),
"v1",
false,
),
})
wanted := `conflicts with "Alice":
- .value
- .list[id=2,key="a"].key
conflicts with "Bob":
- .key
- .list[id=2,key="a"].id`
if got.Error() != wanted
|
}
func TestToSet(t *testing.T) {
conflicts := merge.ConflictsFromManagers(fieldpath.ManagedFields{
"Bob": fieldpath.NewVersionedSet(
_NS(
_P("key"),
_P("list", _KBF("key", "a", "id", 2), "id"),
),
"v1",
false,
),
"Alice": fieldpath.NewVersionedSet(
_NS(
_P("value"),
_P("list", _KBF("key", "a", "id", 2), "key"),
),
"v1",
false,
),
})
expected := fieldpath.NewSet(
_P("key"),
_P("value"),
_P("list", _KBF("key", "a", "id", 2), "id"),
_P("list", _KBF("key", "a", "id", 2), "key"),
)
actual := conflicts.ToSet()
if !expected.Equals(actual) {
t.Fatalf("expected\n%v\n, but got\n%v\n", expected, actual)
}
}
|
{
t.Errorf("Got %v, wanted %v", got.Error(), wanted)
}
|
q1.py
|
# Question 1
# This function converts miles to kilometers (km).
# Complete the function to return the result of the conversion
# Call the function to convert the trip distance from miles to kilometers
# Fill in the blank to print the result of the conversion
# Calculate the round-trip in kilometers by doubling the result, and fill in the blank to print the result
# 1) Complete the function to return the result of the conversion
def
|
(miles):
km = miles * 1.6 # approximately 1.6 km in 1 mile
return km
my_trip_miles = 55
# 2) Convert my_trip_miles to kilometers by calling the function above
my_trip_km = convert_distance(my_trip_miles)
# 3) Fill in the blank to print the result of the conversion
print("The distance in kilometers is " + str(my_trip_km))
# 4) Calculate the round-trip in kilometers by doubling the result,
# and fill in the blank to print the result
print("The round-trip in kilometers is " + str(my_trip_km * 2))
|
convert_distance
|
index.js
|
/*
* @Author: your name
* @Date: 2021-12-08 10:04:28
* @LastEditTime: 2021-12-15 15:19:01
* @LastEditors: Please set LastEditors
* @Description: Open koroFileHeader to view the configuration and adjust settings: https://github.com/OBKoro1/koro1FileHeader/wiki/%E9%85%8D%E7%BD%AE
* @FilePath: \ChinaMobile-app\src\routes\application\components\choice\index.js
*/
/**
* @author Lowkey
* @date 2021/05/11 15:32:28
* @Description:
*/
import { Component } from 'react';
import {
List,
Checkbox,
Button,
Toast, // assumed to come from the same 'components' barrel; needed for Toast.fail in onSubmit
} from 'components';
import Tag from 'components/tag';
import styles from './index.less';
const CheckboxItem = Checkbox.CheckboxItem;
const data = [
{ value: '集体学习', label: '集体学习' },
{ value: '复制推广', label: '复制推广' },
{ value: '创新突破', label: '创新突破' },
];
const data2 = [
{ value: 1, label: 'A' },
{ value: 2, label: 'B' },
{ value: 3, label: 'C' },
{ value: 4, label: 'D' },
];
class Choice extends Component {
constructor (props) {
super(props);
|
his.state = {
res: []
};
}
onChange = (val) => {
const arr = this.state.res;
if (arr.includes(val)) {
let index = arr.indexOf(val);
arr.splice(index, 1);
} else {
arr.push(val);
}
this.setState({
res: arr
});
};
componentDidMount () {
const { issueId = '', videoId = '' } = this.props;
this.props.dispatch({
type: 'lessondetails/getTest',
payload: {
videoId,
issueId
},
});
}
onSubmit = () => {
const { issueId = '', videoId = '' } = this.props;
if (this.state.res.length > 0) {
this.props.dispatch({
type: 'lessondetails/sendTest',
payload: {
txt: this.state.res.join(','),
issueId,
videoId
}
});
} else {
Toast.fail('至少选择一个答案');
}
}
renderRes = (answer) => {
const arr = answer.split(',');
return (
<div>
{
arr.map(item => {
return (
<Tag inline key={item} color="pink" text={item} />
);
})
}
</div>
);
}
render () {
const { question = '', answer, type, loading } = this.props;
const res = type === 'recognition' ? data2 : data;
if (answer) {
return this.renderRes(answer);
}
return (
<div>
{type === 'recognition' ? <div className={styles.title}>请回答以下哪些作法是案例中的关键步骤?</div> : null}
<List>
{res.map(i => (
<CheckboxItem key={i.value} onChange={() => this.onChange(i.value)}>
{i.label}
</CheckboxItem>
))}
</List>
<Button loading={loading} onClick={this.onSubmit} type="primary" size="small">提交</Button>
</div>
);
}
}
export default Choice;
|
t
|
custom-keyboard.tsx
|
import classnames from 'classnames';
import {VNode} from 'vue';
import {Options, Vue} from 'vue-class-component';
import {Prop} from 'vue-property-decorator';
import {IS_IOS} from '../../utils/exenv';
import TouchFeedback from '../../vmc-feedback';
@Options({
name: 'KeyboardItem'
})
export class
|
extends Vue {
@Prop({type: [String, Number]})
public value: any;
@Prop(String)
public label: string;
@Prop({type: String})
public type: string;
@Prop({default: 'am-number-keyboard'})
public prefixCls?: string;
get tdRef(): any {
return this.$refs['td'];
}
@Prop()
public iconOnly?: boolean;
@Prop({type: Boolean, default: false})
public disabled: boolean;
public render() {
const {
prefixCls,
disabled,
label,
iconOnly,
...restProps
} = this;
let value: any = this.$slots.default();
const type = this.type;
if (type === 'keyboard-delete') {
value = 'delete';
} else if (type === 'keyboard-hide') {
value = 'hide';
} else if (type === 'keyboard-confirm') {
value = 'confirm';
}
const wrapCls = classnames(`${prefixCls}-item`);
const TouchFeedback2: any = TouchFeedback;
return (
<TouchFeedback2
class={type}
props={
{
activeClassName: `${prefixCls}-item-active`
}
}>
<td
ref="td"
// tslint:disable-next-line:jsx-no-multiline-js
onclick={e => {
this.$emit('click', e, this.value);
}}
class={wrapCls}
{...restProps}
>
{this.$slots.default && this.$slots.default()}
{iconOnly && <i class="sr-only">{label}</i>}
</td>
</TouchFeedback2>
);
}
}
@Options({
name: 'CustomKeyboard'
})
class CustomKeyboard extends Vue {
@Prop()
public prefixCls: string;
@Prop()
public confirmLabel: string;
@Prop()
public backspaceLabel: string;
@Prop()
public cancelKeyboardLabel: string;
@Prop()
public wrapProps: any;
@Prop()
public header: VNode;
public linkedInput: any;
get antmKeyboard(): HTMLDivElement | null {
return this.$refs.antmKeyboard as any;
}
public confirmDisabled: boolean;
public confirmKeyboardItem: HTMLTableDataCellElement | null;
public onKeyboardClick(e, value: string = '') {
e.stopImmediatePropagation();
if (value === 'confirm' && this.confirmDisabled) {
return null;
} else {
if (this.linkedInput) {
this.linkedInput.onKeyboardClick(value);
}
}
}
public renderKeyboardItem(item: string, index: number) {
const KeyboardItem2: any = KeyboardItem;
return (
<KeyboardItem2
props={{value: item}}
onClick={this.onKeyboardClick}
key={`item-${item}-${index}`}>
{item}
</KeyboardItem2>
);
}
public render() {
const {
prefixCls,
confirmLabel,
backspaceLabel,
cancelKeyboardLabel,
wrapProps,
header
} = this;
const wrapperCls = classnames(
`${prefixCls}-wrapper`,
`${prefixCls}-wrapper-hide`
);
const KeyboardItem2: any = KeyboardItem;
return (
<div class={wrapperCls} ref="antmKeyboard" {...wrapProps}>
{header}
<table>
<tbody>
<tr>
{['1', '2', '3'].map((item, index) =>
// tslint:disable-next-line:jsx-no-multiline-js
this.renderKeyboardItem(item, index)
)}
<KeyboardItem2
props={
{
...this.getAriaAttr(backspaceLabel),
type: 'keyboard-delete',
rowSpan: 2
}
}
on={
{
click: e => this.onKeyboardClick(e, 'delete')
}
}
/>
</tr>
<tr>
{['4', '5', '6'].map((item, index) =>
// tslint:disable-next-line:jsx-no-multiline-js
this.renderKeyboardItem(item, index)
)}
</tr>
<tr>
{['7', '8', '9'].map((item, index) =>
// tslint:disable-next-line:jsx-no-multiline-js
this.renderKeyboardItem(item, index)
)}
<KeyboardItem2
props={
{
type: 'keyboard-confirm',
rowSpan: 2
}
}
on={
{
click: e => this.onKeyboardClick(e, 'confirm')
}
}
tdRef="td"
>
{confirmLabel}
</KeyboardItem2>
</tr>
<tr>
{['.', '0'].map((item, index) =>
// tslint:disable-next-line:jsx-no-multiline-js
this.renderKeyboardItem(item, index)
)}
<KeyboardItem2
props={
{
...this.getAriaAttr(cancelKeyboardLabel),
type: 'keyboard-hide'
}
}
on={
{
click: e => this.onKeyboardClick(e, 'hide')
}
}
/>
</tr>
</tbody>
</table>
</div>
);
}
public getAriaAttr(label: string) {
if (IS_IOS) {
return {label, iconOnly: true};
} else {
return {role: 'button', 'aria-label': label};
}
}
}
export default CustomKeyboard as any;
|
KeyboardItem
|
Q1344.py
|
"""
1344
medium
angle between hands of a clock
"""
class Solution:
def angleClock(self, hour: int, minutes: int) -> float:
|
sol = Solution()
hour = 12
minutes = 30
print(sol.angleClock(hour, minutes))
|
min_angle = minutes * 6
hour_angle = hour * 30 + minutes / 2
hand_angles = abs(min_angle - hour_angle)
if hand_angles > 180:
hand_angles = 360 - hand_angles
return hand_angles
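# Worked example (added for clarity, not part of the original solution), using the
# sample inputs from the small driver in this file (hour = 12, minutes = 30):
#   minute hand: 30 * 6 = 180 degrees
#   hour hand:   12 * 30 + 30 / 2 = 375 degrees
#   |180 - 375| = 195 > 180, so the reported angle is 360 - 195 = 165.0 degrees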
|
cfg.rs
|
use std::str::{self, FromStr};
use std::iter;
use std::fmt;
use util::{CraftError, CraftResult, human};
#[derive(Clone, PartialEq, Debug)]
pub enum Cfg {
Name(String),
KeyPair(String, String),
}
#[derive(Clone, PartialEq, Debug)]
pub enum CfgExpr {
Not(Box<CfgExpr>),
All(Vec<CfgExpr>),
Any(Vec<CfgExpr>),
Value(Cfg),
}
#[derive(PartialEq)]
enum Token<'a> {
LeftParen,
RightParen,
Ident(&'a str),
Comma,
Equals,
String(&'a str),
}
struct Tokenizer<'a> {
s: iter::Peekable<str::CharIndices<'a>>,
orig: &'a str,
}
struct Parser<'a> {
t: iter::Peekable<Tokenizer<'a>>,
}
impl FromStr for Cfg {
type Err = Box<CraftError>;
fn from_str(s: &str) -> CraftResult<Cfg> {
let mut p = Parser::new(s);
let e = p.cfg()?;
if p.t.next().is_some() {
bail!("malformed cfg value or key/value pair")
}
Ok(e)
}
}
impl fmt::Display for Cfg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Cfg::Name(ref s) => s.fmt(f),
Cfg::KeyPair(ref k, ref v) => write!(f, "{} = \"{}\"", k, v),
}
}
}
impl CfgExpr {
pub fn matches(&self, cfg: &[Cfg]) -> bool {
match *self {
CfgExpr::Not(ref e) => !e.matches(cfg),
CfgExpr::All(ref e) => e.iter().all(|e| e.matches(cfg)),
CfgExpr::Any(ref e) => e.iter().any(|e| e.matches(cfg)),
CfgExpr::Value(ref e) => cfg.contains(e),
}
}
}
impl FromStr for CfgExpr {
type Err = Box<CraftError>;
fn from_str(s: &str) -> CraftResult<CfgExpr> {
let mut p = Parser::new(s);
let e = p.expr()?;
if p.t.next().is_some() {
bail!("can only have one cfg-expression, consider using all() or \
any() explicitly")
}
Ok(e)
}
}
impl fmt::Display for CfgExpr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
CfgExpr::Not(ref e) => write!(f, "not({})", e),
CfgExpr::All(ref e) => write!(f, "all({})", CommaSep(e)),
CfgExpr::Any(ref e) => write!(f, "any({})", CommaSep(e)),
CfgExpr::Value(ref e) => write!(f, "{}", e),
}
}
}
struct CommaSep<'a, T: 'a>(&'a [T]);
impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for (i, v) in self.0.iter().enumerate() {
if i > 0 {
write!(f, ", ")?;
}
write!(f, "{}", v)?;
}
Ok(())
}
}
impl<'a> Parser<'a> {
fn new(s: &'a str) -> Parser<'a> {
Parser {
t: Tokenizer {
s: s.char_indices().peekable(),
orig: s,
}
.peekable(),
}
}
fn expr(&mut self) -> CraftResult<CfgExpr> {
match self.t.peek() {
Some(&Ok(Token::Ident(op @ "all"))) |
Some(&Ok(Token::Ident(op @ "any"))) => {
self.t.next();
let mut e = Vec::new();
self.eat(Token::LeftParen)?;
while !self.try(Token::RightParen) {
e.push(self.expr()?);
if !self.try(Token::Comma) {
self.eat(Token::RightParen)?;
break;
}
}
if op == "all" {
Ok(CfgExpr::All(e))
} else {
Ok(CfgExpr::Any(e))
}
}
Some(&Ok(Token::Ident("not"))) => {
self.t.next();
self.eat(Token::LeftParen)?;
let e = self.expr()?;
self.eat(Token::RightParen)?;
Ok(CfgExpr::Not(Box::new(e)))
}
Some(&Ok(..)) => self.cfg().map(CfgExpr::Value),
Some(&Err(..)) => Err(self.t.next().unwrap().err().unwrap()),
None => bail!("expected start of a cfg expression, found nothing"),
}
}
fn cfg(&mut self) -> CraftResult<Cfg> {
match self.t.next() {
Some(Ok(Token::Ident(name))) => {
let e = if self.try(Token::Equals) {
let val = match self.t.next() {
Some(Ok(Token::String(s))) => s,
Some(Ok(t)) => bail!("expected a string, found {}", t.classify()),
Some(Err(e)) => return Err(e),
None => bail!("expected a string, found nothing"),
};
Cfg::KeyPair(name.to_string(), val.to_string())
} else {
Cfg::Name(name.to_string())
};
Ok(e)
}
Some(Ok(t)) => bail!("expected identifier, found {}", t.classify()),
Some(Err(e)) => Err(e),
None => bail!("expected identifier, found nothing"),
}
}
fn try(&mut self, token: Token<'a>) -> bool {
match self.t.peek() {
Some(&Ok(ref t)) if token == *t => {}
_ => return false,
}
self.t.next();
true
}
fn eat(&mut self, token: Token<'a>) -> CraftResult<()>
|
}
impl<'a> Iterator for Tokenizer<'a> {
type Item = CraftResult<Token<'a>>;
fn next(&mut self) -> Option<CraftResult<Token<'a>>> {
loop {
match self.s.next() {
Some((_, ' ')) => {}
Some((_, '(')) => return Some(Ok(Token::LeftParen)),
Some((_, ')')) => return Some(Ok(Token::RightParen)),
Some((_, ',')) => return Some(Ok(Token::Comma)),
Some((_, '=')) => return Some(Ok(Token::Equals)),
Some((start, '"')) => {
while let Some((end, ch)) = self.s.next() {
if ch == '"' {
return Some(Ok(Token::String(&self.orig[start + 1..end])));
}
}
return Some(Err(human("unterminated string in cfg".to_string())));
}
Some((start, ch)) if is_ident_start(ch) => {
while let Some(&(end, ch)) = self.s.peek() {
if !is_ident_rest(ch) {
return Some(Ok(Token::Ident(&self.orig[start..end])));
} else {
self.s.next();
}
}
return Some(Ok(Token::Ident(&self.orig[start..])));
}
Some((_, ch)) => {
return Some(Err(human(format!("unexpected character in \
cfg `{}`, expected parens, \
a comma, an identifier, or \
a string",
ch))))
}
None => return None,
}
}
}
}
fn is_ident_start(ch: char) -> bool {
ch == '_' || ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z')
}
fn is_ident_rest(ch: char) -> bool {
is_ident_start(ch) || ('0' <= ch && ch <= '9')
}
impl<'a> Token<'a> {
fn classify(&self) -> &str {
match *self {
Token::LeftParen => "`(`",
Token::RightParen => "`)`",
Token::Ident(..) => "an identifier",
Token::Comma => "`,`",
Token::Equals => "`=`",
Token::String(..) => "a string",
}
}
}
|
{
match self.t.next() {
Some(Ok(ref t)) if token == *t => Ok(()),
Some(Ok(t)) => bail!("expected {}, found {}", token.classify(), t.classify()),
Some(Err(e)) => Err(e),
None => bail!("expected {}, but cfg expr ended", token.classify()),
}
}
|
test_snippet.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use source_map::{SourceMap, FilePathMapping};
use errors::Handler;
use errors::emitter::EmitterWriter;
use std::io;
use std::io::prelude::*;
use rustc_data_structures::sync::Lrc;
use std::str;
use std::sync::{Arc, Mutex};
use std::path::Path;
use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan};
use with_globals;
/// Identify a position in the text by the Nth occurrence of a string.
struct Position {
string: &'static str,
count: usize,
}
struct SpanLabel {
start: Position,
end: Position,
label: &'static str,
}
struct Shared<T: Write> {
data: Arc<Mutex<T>>,
}
impl<T: Write> Write for Shared<T> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.data.lock().unwrap().write(buf)
}
fn flush(&mut self) -> io::Result<()> {
self.data.lock().unwrap().flush()
}
}
fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
with_globals(|| {
let output = Arc::new(Mutex::new(Vec::new()));
let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
source_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span);
for span_label in span_labels {
let span = make_span(&file_text, &span_label.start, &span_label.end);
msp.push_span_label(span, span_label.label.to_string());
println!("span: {:?} label: {:?}", span, span_label.label);
println!("text: {:?}", source_map.span_to_snippet(span));
}
let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }),
Some(source_map.clone()),
false,
false);
let handler = Handler::with_emitter(true, false, Box::new(emitter));
handler.span_err(msp, "foo");
assert!(expected_output.chars().next() == Some('\n'),
"expected output should begin with newline");
let expected_output = &expected_output[1..];
let bytes = output.lock().unwrap();
let actual_output = str::from_utf8(&bytes).unwrap();
println!("expected output:\n------\n{}------", expected_output);
println!("actual output:\n------\n{}------", actual_output);
assert!(expected_output == actual_output)
})
}
fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {
let start = make_pos(file_text, start);
let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends
assert!(start <= end);
Span::new(BytePos(start as u32), BytePos(end as u32), NO_EXPANSION)
}
fn make_pos(file_text: &str, pos: &Position) -> usize {
let mut remainder = file_text;
let mut offset = 0;
for _ in 0..pos.count {
if let Some(n) = remainder.find(&pos.string) {
offset += n;
remainder = &remainder[n + 1..];
} else {
panic!("failed to find {} instances of {:?} in {:?}",
pos.count,
pos.string,
file_text);
}
}
offset
}
#[test]
fn ends_on_col0() {
test_harness(r#"
fn foo() {
}
"#,
vec![
SpanLabel {
start: Position {
string: "{",
count: 1,
},
end: Position {
string: "}",
count: 1,
},
label: "test",
},
],
r#"
error: foo
--> test.rs:2:10
|
2 | fn foo() {
| __________^
3 | | }
| |_^ test
"#);
}
#[test]
fn ends_on_col2() {
test_harness(r#"
fn foo() {
}
"#,
vec![
SpanLabel {
start: Position {
string: "{",
count: 1,
},
end: Position {
string: "}",
count: 1,
},
label: "test",
},
],
r#"
error: foo
--> test.rs:2:10
|
2 | fn foo() {
| __________^
3 | |
4 | |
5 | | }
| |___^ test
"#);
}
#[test]
fn non_nested() {
test_harness(r#"
fn foo() {
X0 Y0
X1 Y1
X2 Y2
}
"#,
vec![
SpanLabel {
start: Position {
string: "X0",
count: 1,
},
end: Position {
string: "X2",
count: 1,
},
label: "`X` is a good letter",
},
SpanLabel {
start: Position {
string: "Y0",
count: 1,
},
end: Position {
string: "Y2",
count: 1,
},
label: "`Y` is a good letter too",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | X0 Y0
| ____^__-
| | ___|
| ||
4 | || X1 Y1
5 | || X2 Y2
| ||____^__- `Y` is a good letter too
| |____|
| `X` is a good letter
"#);
}
#[test]
fn nested() {
test_harness(r#"
fn foo() {
X0 Y0
Y1 X1
}
"#,
vec![
SpanLabel {
start: Position {
string: "X0",
count: 1,
},
end: Position {
string: "X1",
count: 1,
},
label: "`X` is a good letter",
},
SpanLabel {
start: Position {
string: "Y0",
count: 1,
},
end: Position {
string: "Y1",
count: 1,
},
label: "`Y` is a good letter too",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | X0 Y0
| ____^__-
| | ___|
| ||
4 | || Y1 X1
| ||____-__^ `X` is a good letter
| |_____|
| `Y` is a good letter too
"#);
}
#[test]
fn different_overlap() {
test_harness(r#"
fn foo() {
X0 Y0 Z0
X1 Y1 Z1
X2 Y2 Z2
X3 Y3 Z3
}
"#,
vec![
SpanLabel {
start: Position {
string: "Y0",
count: 1,
},
end: Position {
string: "X2",
count: 1,
},
label: "`X` is a good letter",
},
SpanLabel {
start: Position {
string: "Z1",
count: 1,
},
end: Position {
string: "X3",
count: 1,
},
label: "`Y` is a good letter too",
},
],
r#"
error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | X1 Y1 Z1
| |_________-
5 | || X2 Y2 Z2
| ||____^ `X` is a good letter
6 | | X3 Y3 Z3
| |_____- `Y` is a good letter too
"#);
}
#[test]
fn triple_overlap() {
test_harness(r#"
fn foo() {
X0 Y0 Z0
X1 Y1 Z1
X2 Y2 Z2
}
"#,
vec![
SpanLabel {
start: Position {
string: "X0",
count: 1,
},
end: Position {
string: "X2",
count: 1,
},
label: "`X` is a good letter",
},
SpanLabel {
start: Position {
string: "Y0",
count: 1,
},
end: Position {
string: "Y2",
count: 1,
},
label: "`Y` is a good letter too",
},
SpanLabel {
start: Position {
string: "Z0",
count: 1,
},
end: Position {
string: "Z2",
count: 1,
},
label: "`Z` label",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | X0 Y0 Z0
| _____^__-__-
| | ____|__|
| || ___|
| |||
4 | ||| X1 Y1 Z1
5 | ||| X2 Y2 Z2
| |||____^__-__- `Z` label
| ||____|__|
| |____| `Y` is a good letter too
| `X` is a good letter
"#);
}
#[test]
fn minimum_depth() {
test_harness(r#"
fn foo() {
X0 Y0 Z0
X1 Y1 Z1
X2 Y2 Z2
X3 Y3 Z3
}
"#,
vec![
SpanLabel {
start: Position {
string: "Y0",
count: 1,
},
end: Position {
string: "X1",
count: 1,
},
label: "`X` is a good letter",
},
SpanLabel {
start: Position {
string: "Y1",
count: 1,
},
end: Position {
string: "Z2",
count: 1,
},
label: "`Y` is a good letter too",
},
SpanLabel {
start: Position {
string: "X2",
count: 1,
},
end: Position {
string: "Y3",
count: 1,
},
label: "`Z`",
},
],
r#"
error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | X1 Y1 Z1
| |____^_-
| ||____|
| | `X` is a good letter
5 | | X2 Y2 Z2
| |____-______- `Y` is a good letter too
| ____|
| |
6 | | X3 Y3 Z3
| |________- `Z`
"#);
}
#[test]
fn non_overlaping() {
test_harness(r#"
fn foo() {
X0 Y0 Z0
X1 Y1 Z1
X2 Y2 Z2
X3 Y3 Z3
}
"#,
vec![
SpanLabel {
start: Position {
string: "X0",
count: 1,
},
end: Position {
string: "X1",
count: 1,
},
label: "`X` is a good letter",
},
SpanLabel {
start: Position {
string: "Y2",
count: 1,
},
end: Position {
string: "Z3",
count: 1,
},
label: "`Y` is a good letter too",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | / X0 Y0 Z0
4 | | X1 Y1 Z1
| |____^ `X` is a good letter
5 | X2 Y2 Z2
| ______-
6 | | X3 Y3 Z3
| |__________- `Y` is a good letter too
"#);
}
#[test]
fn overlaping_start_and_end() {
test_harness(r#"
fn foo() {
X0 Y0 Z0
X1 Y1 Z1
X2 Y2 Z2
X3 Y3 Z3
}
"#,
vec![
SpanLabel {
start: Position {
string: "Y0",
count: 1,
},
end: Position {
string: "X1",
count: 1,
},
label: "`X` is a good letter",
},
SpanLabel {
start: Position {
string: "Z1",
count: 1,
},
end: Position {
string: "Z3",
count: 1,
},
label: "`Y` is a good letter too",
},
],
r#"
error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | X1 Y1 Z1
| |____^____-
| ||____|
| | `X` is a good letter
5 | | X2 Y2 Z2
6 | | X3 Y3 Z3
| |___________- `Y` is a good letter too
"#);
}
#[test]
fn multiple_labels_primary_without_message() {
test_harness(r#"
fn foo() {
a { b { c } d }
}
"#,
vec![
SpanLabel {
start: Position {
string: "b",
count: 1,
},
end: Position {
string: "}",
count: 1,
},
label: "",
},
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "`a` is a good letter",
},
SpanLabel {
start: Position {
string: "c",
count: 1,
},
end: Position {
string: "c",
count: 1,
},
label: "",
},
],
r#"
error: foo
--> test.rs:3:7
|
3 | a { b { c } d }
| ----^^^^-^^-- `a` is a good letter
"#);
}
#[test]
fn multiple_labels_secondary_without_message() {
test_harness(r#"
fn foo() {
a { b { c } d }
}
"#,
vec![
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "`a` is a good letter",
},
SpanLabel {
start: Position {
string: "b",
count: 1,
},
end: Position {
string: "}",
count: 1,
},
label: "",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | a { b { c } d }
| ^^^^-------^^ `a` is a good letter
"#);
}
#[test]
fn multiple_labels_primary_without_message_2() {
test_harness(r#"
fn foo() {
a { b { c } d }
}
"#,
vec![
SpanLabel {
start: Position {
string: "b",
count: 1,
},
end: Position {
string: "}",
count: 1,
},
label: "`b` is a good letter",
},
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "",
},
SpanLabel {
start: Position {
string: "c",
count: 1,
},
end: Position {
string: "c",
count: 1,
},
label: "",
},
],
r#"
error: foo
--> test.rs:3:7
|
3 | a { b { c } d }
| ----^^^^-^^--
| |
| `b` is a good letter
"#);
}
#[test]
fn multiple_labels_secondary_without_message_2() {
test_harness(r#"
fn foo() {
a { b { c } d }
}
"#,
vec![
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "",
},
SpanLabel {
start: Position {
string: "b",
count: 1,
},
end: Position {
string: "}",
count: 1,
},
label: "`b` is a good letter",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | a { b { c } d }
| ^^^^-------^^
| |
| `b` is a good letter
"#);
}
#[test]
fn multiple_labels_secondary_without_message_3() {
test_harness(r#"
fn foo() {
a bc d
}
"#,
vec![
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "b",
count: 1,
},
label: "`a` is a good letter",
},
SpanLabel {
start: Position {
string: "c",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | a bc d
| ^^^^----
| |
| `a` is a good letter
"#);
}
#[test]
fn multiple_labels_without_message() {
test_harness(r#"
fn foo() {
a { b { c } d }
}
"#,
vec![
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "",
},
SpanLabel {
start: Position {
string: "b",
count: 1,
},
end: Position {
string: "}",
count: 1,
},
label: "",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | a { b { c } d }
| ^^^^-------^^
"#);
}
#[test]
fn multiple_labels_without_message_2() {
test_harness(r#"
fn foo() {
a { b { c } d }
}
"#,
vec![
SpanLabel {
start: Position {
string: "b",
count: 1,
},
end: Position {
string: "}",
count: 1,
},
label: "",
},
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "",
},
SpanLabel {
start: Position {
string: "c",
count: 1,
},
end: Position {
string: "c",
count: 1,
},
label: "",
},
],
r#"
error: foo
--> test.rs:3:7
|
3 | a { b { c } d }
| ----^^^^-^^--
"#);
}
#[test]
fn multiple_labels_with_message() {
test_harness(r#"
fn foo() {
a { b { c } d }
}
"#,
vec![
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "`a` is a good letter",
},
SpanLabel {
start: Position {
string: "b",
count: 1,
},
end: Position {
string: "}",
count: 1,
},
label: "`b` is a good letter",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | a { b { c } d }
| ^^^^-------^^
| | |
| | `b` is a good letter
| `a` is a good letter
"#);
}
#[test]
fn single_label_with_message() {
test_harness(r#"
fn foo() {
a { b { c } d }
}
"#,
vec![
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "`a` is a good letter",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | a { b { c } d }
| ^^^^^^^^^^^^^ `a` is a good letter
"#);
}
#[test]
fn single_label_without_message() {
test_harness(r#"
fn foo() {
a { b { c } d }
}
"#,
vec![
SpanLabel {
start: Position {
string: "a",
count: 1,
},
end: Position {
string: "d",
count: 1,
},
label: "",
},
],
r#"
error: foo
--> test.rs:3:3
|
3 | a { b { c } d }
| ^^^^^^^^^^^^^
"#);
}
#[test]
fn long_snippet()
|
#[test]
fn long_snippet_multiple_spans() {
test_harness(r#"
fn foo() {
X0 Y0 Z0
1
2
3
X1 Y1 Z1
4
5
6
X2 Y2 Z2
7
8
9
10
X3 Y3 Z3
}
"#,
vec![
SpanLabel {
start: Position {
string: "Y0",
count: 1,
},
end: Position {
string: "Y3",
count: 1,
},
label: "`Y` is a good letter",
},
SpanLabel {
start: Position {
string: "Z1",
count: 1,
},
end: Position {
string: "Z2",
count: 1,
},
label: "`Z` is a good letter too",
},
],
r#"
error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | 1
5 | | 2
6 | | 3
7 | | X1 Y1 Z1
| |_________-
8 | || 4
9 | || 5
10 | || 6
11 | || X2 Y2 Z2
| ||__________- `Z` is a good letter too
... |
15 | | 10
16 | | X3 Y3 Z3
| |_______^ `Y` is a good letter
"#);
}
|
{
test_harness(r#"
fn foo() {
X0 Y0 Z0
X1 Y1 Z1
1
2
3
4
5
6
7
8
9
10
X2 Y2 Z2
X3 Y3 Z3
}
"#,
vec![
SpanLabel {
start: Position {
string: "Y0",
count: 1,
},
end: Position {
string: "X1",
count: 1,
},
label: "`X` is a good letter",
},
SpanLabel {
start: Position {
string: "Z1",
count: 1,
},
end: Position {
string: "Z3",
count: 1,
},
label: "`Y` is a good letter too",
},
],
r#"
error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | X1 Y1 Z1
| |____^____-
| ||____|
| | `X` is a good letter
5 | | 1
6 | | 2
7 | | 3
... |
15 | | X2 Y2 Z2
16 | | X3 Y3 Z3
| |___________- `Y` is a good letter too
"#);
}
|
linked_data_proof_test.go
|
/*
Copyright SecureKey Technologies Inc. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package verifiable
import (
"crypto/ed25519"
"crypto/rand"
"errors"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/hyperledger/aries-framework-go/pkg/doc/jose"
"github.com/hyperledger/aries-framework-go/pkg/doc/signature/jsonld"
"github.com/hyperledger/aries-framework-go/pkg/doc/signature/suite"
"github.com/hyperledger/aries-framework-go/pkg/doc/signature/suite/ecdsasecp256k1signature2019"
"github.com/hyperledger/aries-framework-go/pkg/doc/signature/suite/ed25519signature2018"
"github.com/hyperledger/aries-framework-go/pkg/doc/signature/verifier"
"github.com/hyperledger/aries-framework-go/pkg/kms"
)
func
|
(t *testing.T) {
t.Run("successful public key resolving", func(t *testing.T) {
pubKey, _, err := ed25519.GenerateKey(rand.Reader)
require.NoError(t, err)
kra := &keyResolverAdapter{pubKeyFetcher: SingleKey(pubKey, kms.ED25519)}
resolvedPubKey, err := kra.Resolve("did1#key1")
require.NoError(t, err)
require.Equal(t, []byte(pubKey), resolvedPubKey.Value)
})
t.Run("error wrong key format", func(t *testing.T) {
kra := &keyResolverAdapter{pubKeyFetcher: func(issuerID, keyID string) (*verifier.PublicKey, error) {
return nil, nil
}}
resolvedPubKey, err := kra.Resolve("any")
require.Error(t, err)
require.EqualError(t, err, "wrong id [any] to resolve")
require.Nil(t, resolvedPubKey)
})
t.Run("error at public key resolving (e.g. not found)", func(t *testing.T) {
kra := &keyResolverAdapter{pubKeyFetcher: func(issuerID, keyID string) (*verifier.PublicKey, error) {
return nil, errors.New("no key found")
}}
resolvedPubKey, err := kra.Resolve("did1#key1")
require.Error(t, err)
require.EqualError(t, err, "no key found")
require.Nil(t, resolvedPubKey)
})
}
// This example is generated using https://transmute-industries.github.io/vc-greeting-card
func TestLinkedDataProofSignerAndVerifier(t *testing.T) {
//nolint:lll
vcJSON := `
{
"@context": [
"https://www.w3.org/2018/credentials/v1",
"https://www.w3.org/2018/credentials/examples/v1"
],
"id": "https://example.com/credentials/1872",
"type": [
"VerifiableCredential",
"UniversityDegreeCredential"
],
"issuer": "did:key:z6Mkj7of2aaooXhTJvJ5oCL9ZVcAS472ZBuSjYyXDa4bWT32",
"issuanceDate": "2020-01-17T15:14:09.724Z",
"credentialSubject": {
"id": "did:example:ebfeb1f712ebc6f1c276e12ec21",
"degree": {
"type": "BachelorDegree"
},
"name": "Jayden Doe",
"spouse": "did:example:c276e12ec21ebfeb1f712ebc6f1"
}
}
`
ed25519Signer, err := newCryptoSigner(kms.ED25519Type)
require.NoError(t, err)
vcWithEd25519Proof := prepareVCWithEd25519LDP(t, vcJSON, ed25519Signer)
vcWithEd25519ProofBytes, err := vcWithEd25519Proof.MarshalJSON()
require.NoError(t, err)
ecdsaSigner, err := newCryptoSigner(kms.ECDSASecp256k1TypeIEEEP1363)
require.NoError(t, err)
vcWithSecp256k1Proof := prepareVCWithSecp256k1LDP(t, vcJSON, ecdsaSigner)
vcWithSecp256k1ProofBytes, err := vcWithSecp256k1Proof.MarshalJSON()
require.NoError(t, err)
require.NotEmpty(t, vcWithSecp256k1ProofBytes)
t.Run("Single signature suite", func(t *testing.T) {
verifierSuite := ed25519signature2018.New(
suite.WithVerifier(ed25519signature2018.NewPublicKeyVerifier()),
suite.WithCompactProof())
vcDecoded, err := parseTestCredential(vcWithEd25519ProofBytes,
WithEmbeddedSignatureSuites(verifierSuite),
WithPublicKeyFetcher(SingleKey(ed25519Signer.PublicKeyBytes(), kms.ED25519)))
require.NoError(t, err)
require.Equal(t, vcWithEd25519Proof, vcDecoded)
})
t.Run("Several signature suites", func(t *testing.T) {
verifierSuites := []verifier.SignatureSuite{
ed25519signature2018.New(
suite.WithVerifier(ed25519signature2018.NewPublicKeyVerifier()),
suite.WithCompactProof()),
ecdsasecp256k1signature2019.New(
suite.WithVerifier(ecdsasecp256k1signature2019.NewPublicKeyVerifier())),
}
vcDecoded, err := parseTestCredential(vcWithEd25519ProofBytes,
WithEmbeddedSignatureSuites(verifierSuites...),
WithPublicKeyFetcher(SingleKey(ed25519Signer.PublicKeyBytes(), kms.ED25519)))
require.NoError(t, err)
require.Equal(t, vcWithEd25519Proof, vcDecoded)
jwk, err := jose.JWKFromPublicKey(ecdsaSigner.PublicKey())
require.NoError(t, err)
vcDecoded, err = parseTestCredential(vcWithSecp256k1ProofBytes,
WithEmbeddedSignatureSuites(verifierSuites...),
WithPublicKeyFetcher(func(issuerID, keyID string) (*verifier.PublicKey, error) {
return &verifier.PublicKey{
Type: "EcdsaSecp256k1VerificationKey2019",
Value: ecdsaSigner.PublicKeyBytes(),
JWK: jwk,
}, nil
}))
require.NoError(t, err)
require.Equal(t, vcWithSecp256k1Proof, vcDecoded)
})
t.Run("no signature suite defined", func(t *testing.T) {
vcDecoded, err := parseTestCredential(vcWithEd25519ProofBytes,
WithPublicKeyFetcher(SingleKey(ed25519Signer.PublicKeyBytes(), kms.ED25519)))
require.NoError(t, err)
require.NotNil(t, vcDecoded)
})
}
func prepareVCWithEd25519LDP(t *testing.T, vcJSON string, signer Signer) *Credential {
vc, err := ParseCredential([]byte(vcJSON),
WithJSONLDDocumentLoader(createTestJSONLDDocumentLoader()),
WithDisabledProofCheck())
require.NoError(t, err)
ed25519SignerSuite := ed25519signature2018.New(
suite.WithSigner(signer),
suite.WithCompactProof())
created, err := time.Parse(time.RFC3339, "2018-03-15T00:00:00Z")
require.NoError(t, err)
err = vc.AddLinkedDataProof(&LinkedDataProofContext{
SignatureType: "Ed25519Signature2018",
Suite: ed25519SignerSuite,
SignatureRepresentation: SignatureJWS,
Created: &created,
VerificationMethod: "did:example:123456#key1",
}, jsonld.WithDocumentLoader(createTestJSONLDDocumentLoader()))
require.NoError(t, err)
require.Len(t, vc.Proofs, 1)
return vc
}
func prepareVCWithSecp256k1LDP(t *testing.T, vcJSON string, signer Signer) *Credential {
vc, err := ParseCredential([]byte(vcJSON),
WithJSONLDDocumentLoader(createTestJSONLDDocumentLoader()),
WithDisabledProofCheck())
require.NoError(t, err)
ed25519SignerSuite := ecdsasecp256k1signature2019.New(
suite.WithSigner(signer))
err = vc.AddLinkedDataProof(&LinkedDataProofContext{
SignatureType: "EcdsaSecp256k1Signature2019",
Suite: ed25519SignerSuite,
SignatureRepresentation: SignatureJWS,
VerificationMethod: "did:example:123456#key1",
}, jsonld.WithDocumentLoader(createTestJSONLDDocumentLoader()))
require.NoError(t, err)
require.Len(t, vc.Proofs, 1)
return vc
}
|
Test_keyResolverAdapter_Resolve
|
queue.go
|
package gripper
/*
Storage queue. Meant to be more inspectable than a channel, to never block
writes, and in future versions to store data to disk if the queue gets too large.
*/
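// Illustrative usage (not part of the original file; NewQueue is defined further down):
//
//	in, out := NewQueue("")  // the path argument is currently unused
//	in <- "job-1"
//	item := <-out
//
// Per the comment above, the producer side is designed not to block on a slow
// consumer: items are parked in the in-memory queue until the consumer goroutine
// drains them through out.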
import (
"time"
"github.com/Workiva/go-datastructures/queue"
)
// TODO: explore disk queue: https://github.com/joncrlsn/dque
func
|
(in, out chan interface{}) {
q := queue.New(1024)
done := false
go func() {
for i := range in {
q.Put(i)
}
done = true
}()
go func() {
for {
i, err := q.Poll(1, time.Millisecond)
if err == nil {
out <- i
} else {
if done {
break
}
}
}
close(out)
}()
}
func NewQueue(path string) (chan<- interface{}, <-chan interface{}) {
in := make(chan interface{}, 10)
out := make(chan interface{}, 10)
go runQueue(in, out)
return in, out
}
|
runQueue
|
progress.py
|
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/16_callback.progress.ipynb (unless otherwise specified).
__all__ = ['ProgressCallback', 'no_bar', 'ShowGraphCallback', 'CSVLogger']
# Cell
from ..basics import *
# Cell
@docs
class ProgressCallback(Callback):
"A `Callback` to handle the display of progress bars"
run_after=Recorder
def begin_fit(self):
assert hasattr(self.learn, 'recorder')
if self.create_mbar: self.mbar = master_bar(list(range(self.n_epoch)))
if self.learn.logger != noop:
self.old_logger,self.learn.logger = self.logger,self._write_stats
self._write_stats(self.recorder.metric_names)
else: self.old_logger = noop
def begin_epoch(self):
if getattr(self, 'mbar', False): self.mbar.update(self.epoch)
def begin_train(self): self._launch_pbar()
def begin_validate(self): self._launch_pbar()
def after_train(self): self.pbar.on_iter_end()
def after_validate(self): self.pbar.on_iter_end()
def after_batch(self):
self.pbar.update(self.iter+1)
if hasattr(self, 'smooth_loss'): self.pbar.comment = f'{self.smooth_loss:.4f}'
def _launch_pbar(self):
self.pbar = progress_bar(self.dl, parent=getattr(self, 'mbar', None), leave=False)
self.pbar.update(0)
def after_fit(self):
if getattr(self, 'mbar', False):
self.mbar.on_iter_end()
delattr(self, 'mbar')
self.learn.logger = self.old_logger
def _write_stats(self, log):
if getattr(self, 'mbar', False): self.mbar.write([f'{l:.6f}' if isinstance(l, float) else str(l) for l in log], table=True)
_docs = dict(begin_fit="Setup the master bar over the epochs",
begin_epoch="Update the master bar",
begin_train="Launch a progress bar over the training dataloader",
begin_validate="Launch a progress bar over the validation dataloader",
after_train="Close the progress bar over the training dataloader",
after_validate="Close the progress bar over the validation dataloader",
after_batch="Update the current progress bar",
after_fit="Close the master bar")
defaults.callbacks = [TrainEvalCallback, Recorder, ProgressCallback]
# Cell
@patch
@contextmanager
def no_bar(self:Learner):
"Context manager that deactivates the use of progress bars"
has_progress = hasattr(self, 'progress')
if has_progress: self.remove_cb(self.progress)
yield self
if has_progress: self.add_cb(ProgressCallback())
# Cell
class ShowGraphCallback(Callback):
"Update a graph of training and validation loss"
run_after=ProgressCallback
def begin_fit(self):
self.run = not hasattr(self.learn, 'lr_finder') and not hasattr(self, "gather_preds")
self.nb_batches = []
assert hasattr(self.learn, 'progress')
def after_train(self):
|
def after_epoch(self):
"Plot validation loss in the pbar graph"
rec = self.learn.recorder
iters = range_of(rec.losses)
val_losses = [v[1] for v in rec.values]
x_bounds = (0, (self.n_epoch - len(self.nb_batches)) * self.nb_batches[0] + len(rec.losses))
y_bounds = (0, max((max(Tensor(rec.losses)), max(Tensor(val_losses)))))
self.progress.mbar.update_graph([(iters, rec.losses), (self.nb_batches, val_losses)], x_bounds, y_bounds)
# Cell
class CSVLogger(Callback):
run_after=Recorder
"Log the results displayed in `learn.path/fname`"
def __init__(self, fname='history.csv', append=False):
self.fname,self.append = Path(fname),append
def read_log(self):
"Convenience method to quickly access the log."
return pd.read_csv(self.path/self.fname)
def begin_fit(self):
"Prepare file with metric names."
self.path.parent.mkdir(parents=True, exist_ok=True)
self.file = (self.path/self.fname).open('a' if self.append else 'w')
self.file.write(','.join(self.recorder.metric_names) + '\n')
self.old_logger,self.learn.logger = self.logger,self._write_line
def _write_line(self, log):
"Write a line with `log` and call the old logger."
self.file.write(','.join([str(t) for t in log]) + '\n')
self.old_logger(log)
def after_fit(self):
"Close the file and clean up."
self.file.close()
self.learn.logger = self.old_logger
|
self.nb_batches.append(self.train_iter)
|
pointer_smuggling.rs
|
static mut PTR: *mut u8 = 0 as *mut _;
fn fun1(x: &mut u8) {
unsafe {
PTR = x;
}
}
fn fun2()
|
fn main() {
let mut val = 0;
let val = &mut val;
fun1(val);
*val = 2; // this invalidates any raw ptrs `fun1` might have created.
fun2(); // if they now use a raw ptr they break our reference
}
|
{
// Now we use a pointer we are not allowed to use
let _x = unsafe { *PTR }; //~ ERROR borrow stack
}
|
tricks.js
|
/* globals Vue, Vuetify, firebase */
const gAuthProvider = new firebase.auth.GoogleAuthProvider()
const admins = [
'Kpz3afszjBR0qwZYUrKURRJx2cm2', // Dylan
'g0G3A7FxieN333lZ2RKclkmv9Uw1' // Svante
]
firebase.firestore().enablePersistence()
|
el: '#app',
vuetify: new Vuetify({
theme: {
themes: {
light: {
primary: '#fe3500',
secondary: '#ffc107',
accent: '#ffeb3b',
error: '#fe3500',
warning: '#ff9800',
info: '#3f51b5',
success: '#4caf50'
}
}
}
}),
data: () => ({
authDialog: 'unchecked',
uid: null,
drawer: false,
discipline: 'sr',
tab: null,
trick: null,
search: '',
tricks: {
sr: [],
dd: [],
wh: []
},
loaded: {
sr: false,
dd: false,
wh: false
},
types: [],
saving: false,
valid: false,
reqRule: [v => !!v || 'Required'],
error: false,
newPrereqId: null
}),
computed: {
levelArrays () {
const out = {}
for (const trick of this.tricks[this.discipline]) {
if (!Object.prototype.hasOwnProperty.call(out, trick.level)) out[trick.level] = []
out[trick.level].push(trick)
}
return out
},
tricksWithoutCurrent () {
return this.tricks[this.discipline].filter(trick => trick.id !== this.trick.id)
}
},
created () {
firebase.auth().onAuthStateChanged(user => {
if (user) {
if (admins.includes(user.uid)) {
this.authDialog = 'authed'
this.uid = user.uid
this.load()
} else {
this.uid = null
this.authDialog = 'notadmin'
}
} else {
this.authDialog = 'unchecked'
this.uid = null
}
})
},
methods: {
login () {
firebase.auth().signInWithPopup(gAuthProvider)
},
logout () {
firebase.auth().signOut()
},
changeDiscipline (discipline) {
this.discipline = discipline
this.load()
},
getTrick (id) {
return this.tricks[this.discipline].find(trick => trick.id === id)
},
load () {
const discipline = `${this.discipline}`
firebase.firestore().collection('tricks' + discipline.toUpperCase()).get().then(qSnap => {
this.loaded[discipline] = true
qSnap.forEach(dSnap => {
const trickIdx = this.tricks[discipline].findIndex(trick => trick.id === dSnap.id)
if (trickIdx !== -1) {
this.tricks[discipline].splice(trickIdx, 1, {
id: dSnap.id,
...dSnap.data()
})
} else {
this.tricks[discipline].push({
id: dSnap.id,
...dSnap.data()
})
}
})
})
firebase.firestore().collection('i18n').doc('en').collection('tricktypes').doc('translated').get().then(dSnap => {
const keys = Object.keys(dSnap.data())
this.$set(this, 'types', keys)
})
},
edit (trick) {
if (!trick.videos) this.$set(trick, 'videos', {})
if (!trick.levels) this.$set(trick, 'levels', {})
if (!trick.levels.ijru) this.$set(trick.levels, 'ijru', {})
if (!trick.levels.ijru.verified) this.$set(trick.levels.ijru, 'verified', {})
this.$set(this, 'trick', trick)
},
close () {
this.$set(this, 'trick', null)
this.$set(this, 'newPrereqId', null)
},
save () {
const discipline = `${this.discipline}`
const { id, ...trick } = this.trick
this.$refs.form.validate()
if (!this.valid || !trick.name || !trick.level || !trick.type) {
this.error = 'Form is missing required fields'
return
}
this.saving = true
if (!id || !trick.createdAt) trick.createdAt = firebase.firestore.FieldValue.serverTimestamp()
trick.updatedAt = firebase.firestore.FieldValue.serverTimestamp()
const baseRef = firebase.firestore().collection('tricks' + discipline.toUpperCase())
const promise = id
? baseRef.doc(id).set(trick, { merge: true })
: baseRef.add(trick)
promise.then(dSnap => {
this.saving = false
if (!dSnap) return this.close()
const trickIdx = this.tricks[discipline].findIndex(trick => trick.id === dSnap.id)
if (trickIdx !== -1) {
this.tricks[discipline].splice(trickIdx, 1, {
id: dSnap.id,
...trick
})
} else {
this.tricks[discipline].push({
id: dSnap.id,
...trick
})
}
this.close()
})
.catch(err => {
this.error = err.message
})
},
generateSlug () {
this.$set(this.trick, 'slug', this.trick.name.toLocaleLowerCase().replace(/[^a-z]/g, '-').replace(/-{2,}/g, '-'))
},
newAlternativeName () {
if (!this.trick.alternativeNames) this.$set(this.trick, 'alternativeNames', [])
this.trick.alternativeNames.push('')
},
removeAlternativeName (idx) {
this.trick.alternativeNames.splice(idx, 1)
},
setAlternativeName (idx, name) {
this.trick.alternativeNames.splice(idx, 1, name)
},
addPrereq (newPrereqId) {
console.log(newPrereqId)
if (!this.trick.prerequisites) this.$set(this.trick, 'prerequisites', [])
const discipline = `${this.discipline}`
this.trick.prerequisites.push({
id: newPrereqId,
ref: firebase.firestore().collection('tricks' + discipline.toUpperCase()).doc(newPrereqId)
})
this.newPrereqId = null
},
removePrereq (idx) {
this.trick.prerequisites.splice(idx, 1)
},
verifyTrick (trick, vLevel, verified) {
this.$set(trick.levels.ijru.verified, 'date', firebase.firestore.Timestamp.now())
this.$set(trick.levels.ijru.verified, 'vLevel', vLevel)
this.$set(trick.levels.ijru.verified, 'verified', verified)
this.$set(trick.levels.ijru.verified, 'verifier', this.uid)
}
},
filters: {
timestamp (value) {
if (!value) return ''
if (!(value instanceof firebase.firestore.Timestamp)) return value
const date = dateFormatter.format(value.toDate())
return date
}
}
})
|
const dateFormatter = new Intl.DateTimeFormat()
new Vue({
|
mail.go
|
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package mail
import (
"fmt"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification/base"
"code.gitea.io/gitea/services/mailer"
)
type mailNotifier struct {
base.NullNotifier
}
var (
_ base.Notifier = &mailNotifier{}
)
// NewNotifier create a new mailNotifier notifier
func NewNotifier() base.Notifier
|
func (m *mailNotifier) NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
issue *models.Issue, comment *models.Comment, mentions []*models.User) {
var act models.ActionType
if comment.Type == models.CommentTypeClose {
act = models.ActionCloseIssue
} else if comment.Type == models.CommentTypeReopen {
act = models.ActionReopenIssue
} else if comment.Type == models.CommentTypeComment {
act = models.ActionCommentIssue
} else if comment.Type == models.CommentTypeCode {
act = models.ActionCommentIssue
} else if comment.Type == models.CommentTypePullPush {
act = 0
}
if err := mailer.MailParticipantsComment(comment, act, issue, mentions); err != nil {
log.Error("MailParticipantsComment: %v", err)
}
}
func (m *mailNotifier) NotifyNewIssue(issue *models.Issue, mentions []*models.User) {
if err := mailer.MailParticipants(issue, issue.Poster, models.ActionCreateIssue, mentions); err != nil {
log.Error("MailParticipants: %v", err)
}
}
func (m *mailNotifier) NotifyIssueChangeStatus(doer *models.User, issue *models.Issue, actionComment *models.Comment, isClosed bool) {
var actionType models.ActionType
issue.Content = ""
if issue.IsPull {
if isClosed {
actionType = models.ActionClosePullRequest
} else {
actionType = models.ActionReopenPullRequest
}
} else {
if isClosed {
actionType = models.ActionCloseIssue
} else {
actionType = models.ActionReopenIssue
}
}
if err := mailer.MailParticipants(issue, doer, actionType, nil); err != nil {
log.Error("MailParticipants: %v", err)
}
}
func (m *mailNotifier) NotifyNewPullRequest(pr *models.PullRequest, mentions []*models.User) {
if err := mailer.MailParticipants(pr.Issue, pr.Issue.Poster, models.ActionCreatePullRequest, mentions); err != nil {
log.Error("MailParticipants: %v", err)
}
}
func (m *mailNotifier) NotifyPullRequestReview(pr *models.PullRequest, r *models.Review, comment *models.Comment, mentions []*models.User) {
var act models.ActionType
if comment.Type == models.CommentTypeClose {
act = models.ActionCloseIssue
} else if comment.Type == models.CommentTypeReopen {
act = models.ActionReopenIssue
} else if comment.Type == models.CommentTypeComment {
act = models.ActionCommentPull
}
if err := mailer.MailParticipantsComment(comment, act, pr.Issue, mentions); err != nil {
log.Error("MailParticipantsComment: %v", err)
}
}
func (m *mailNotifier) NotifyPullRequestCodeComment(pr *models.PullRequest, comment *models.Comment, mentions []*models.User) {
if err := mailer.MailMentionsComment(pr, comment, mentions); err != nil {
log.Error("MailMentionsComment: %v", err)
}
}
func (m *mailNotifier) NotifyIssueChangeAssignee(doer *models.User, issue *models.Issue, assignee *models.User, removed bool, comment *models.Comment) {
// mail only sent to added assignees and not self-assignee
if !removed && doer.ID != assignee.ID && assignee.EmailNotifications() == models.EmailNotificationsEnabled {
ct := fmt.Sprintf("Assigned #%d.", issue.Index)
mailer.SendIssueAssignedMail(issue, doer, ct, comment, []string{assignee.Email})
}
}
func (m *mailNotifier) NotifyPullReviewRequest(doer *models.User, issue *models.Issue, reviewer *models.User, isRequest bool, comment *models.Comment) {
if isRequest && doer.ID != reviewer.ID && reviewer.EmailNotifications() == models.EmailNotificationsEnabled {
ct := fmt.Sprintf("Requested to review %s.", issue.HTMLURL())
mailer.SendIssueAssignedMail(issue, doer, ct, comment, []string{reviewer.Email})
}
}
func (m *mailNotifier) NotifyMergePullRequest(pr *models.PullRequest, doer *models.User) {
if err := pr.LoadIssue(); err != nil {
log.Error("pr.LoadIssue: %v", err)
return
}
pr.Issue.Content = ""
if err := mailer.MailParticipants(pr.Issue, doer, models.ActionMergePullRequest, nil); err != nil {
log.Error("MailParticipants: %v", err)
}
}
func (m *mailNotifier) NotifyPullRequestPushCommits(doer *models.User, pr *models.PullRequest, comment *models.Comment) {
var err error
if err = comment.LoadIssue(); err != nil {
log.Error("comment.LoadIssue: %v", err)
return
}
if err = comment.Issue.LoadRepo(); err != nil {
log.Error("comment.Issue.LoadRepo: %v", err)
return
}
if err = comment.Issue.LoadPullRequest(); err != nil {
log.Error("comment.Issue.LoadPullRequest: %v", err)
return
}
if err = comment.Issue.PullRequest.LoadBaseRepo(); err != nil {
log.Error("comment.Issue.PullRequest.LoadBaseRepo: %v", err)
return
}
if err := comment.LoadPushCommits(); err != nil {
log.Error("comment.LoadPushCommits: %v", err)
}
comment.Content = ""
m.NotifyCreateIssueComment(doer, comment.Issue.Repo, comment.Issue, comment, nil)
}
func (m *mailNotifier) NotifyPullRevieweDismiss(doer *models.User, review *models.Review, comment *models.Comment) {
if err := mailer.MailParticipantsComment(comment, models.ActionPullReviewDismissed, review.Issue, []*models.User{}); err != nil {
log.Error("MailParticipantsComment: %v", err)
}
}
func (m *mailNotifier) NotifyNewRelease(rel *models.Release) {
if err := rel.LoadAttributes(); err != nil {
log.Error("NotifyNewRelease: %v", err)
return
}
if rel.IsDraft || rel.IsPrerelease {
return
}
mailer.MailNewRelease(rel)
}
func (m *mailNotifier) NotifyRepoPendingTransfer(doer, newOwner *models.User, repo *models.Repository) {
if err := mailer.SendRepoTransferNotifyMail(doer, newOwner, repo); err != nil {
log.Error("NotifyRepoPendingTransfer: %v", err)
}
}
|
{
return &mailNotifier{}
}
|
v2-test-cases.spec.ts
|
import { SchematicTestRunner, UnitTestTree } from '@angular-devkit/schematics/testing';
import { join } from 'path';
import {
createFileSystemTestApp,
migrationCollection,
readFileContent,
resolveBazelDataFile,
runTestCases,
} from './index.spec';
import { MOCK_LAYOUT } from './v2/MOCK_LAYOUT';
describe('v2', () => {
const migrationName = 'migration-v2';
describe('upgrade test cases', () => {
/**
|
let testCasesOutputPath: string;
let testCasesLogOutput: string;
beforeAll(async () => {
const testCaseInputs = testCases.reduce((inputs, testCaseName) => {
inputs[testCaseName] = resolveBazelDataFile(`${testCaseName}_input.ts`);
return inputs;
}, {});
const { tempPath, logOutput } = await runTestCases(migrationName, testCaseInputs);
testCasesOutputPath = join(tempPath, 'projects/ng-alain/src/test-cases/');
testCasesLogOutput = logOutput;
});
// Iterates through every test case directory and generates a jasmine test block that will
// verify that the update schematics properly updated the test input to the expected output.
testCases.forEach(testCaseName => {
const expectedOutputPath = resolveBazelDataFile(`${testCaseName}_expected_output.ts`);
it(`should apply update schematics to test case: ${testCaseName}`, () => {
const output = readFileContent(join(testCasesOutputPath, `${testCaseName}.ts`));
const expected = readFileContent(expectedOutputPath);
expect(output).toBe(expected);
});
});
});
describe('layout', () => {
let tree: UnitTestTree;
beforeEach(() => {
const runner = new SchematicTestRunner('schematics', migrationCollection);
tree = createFileSystemTestApp(runner).appTree;
Object.keys(MOCK_LAYOUT).forEach(path => tree.create(path, MOCK_LAYOUT[path]));
runner.runSchematic(migrationName, {}, tree);
});
it('should working', () => {
const style = tree.readContent('src/styles.less');
expect(style).toContain(`~@delon/theme/styles/layout/default/index`);
const defaultCompHTML = tree.readContent('src/app/layout/default/default.component.html');
expect(defaultCompHTML).toContain(`alain-default__progress-bar`);
const headerCompHTML = tree.readContent(
'src/app/layout/default/header/header.component.html',
);
expect(headerCompHTML).toContain(`alain-default__header-logo-link`);
const headerSearchCompHTML = tree.readContent(
'src/app/layout/default/header/components/search.component.ts',
);
expect(headerSearchCompHTML).toContain(`alain-default__search-focus`);
const sidebarCompHTML = tree.readContent(
'src/app/layout/default/sidebar/sidebar.component.html',
);
expect(sidebarCompHTML).toContain(`alain-default__aside-inner`);
});
});
describe('dom', () => {
const testCases = ['v2/dom'];
let tree: UnitTestTree;
beforeEach(() => {
const runner = new SchematicTestRunner('schematics', migrationCollection);
tree = createFileSystemTestApp(runner).appTree;
testCases.forEach(testCaseName => {
tree.create(
`src/app/${testCaseName}.ts`,
readFileContent(resolveBazelDataFile(`${testCaseName}_input.ts`)),
);
});
runner.runSchematic(migrationName, {}, tree);
});
testCases.forEach(testCaseName => {
const expectedOutputPath = resolveBazelDataFile(`${testCaseName}_expected_output.ts`);
it(`should apply update schematics to test case: ${testCaseName}`, () => {
const output = tree.readContent(`src/app/${testCaseName}.ts`);
const expected = readFileContent(expectedOutputPath);
expect(output).toBe(expected);
});
});
});
});
|
* Name of test cases that will be used to verify that update schematics properly update
* a developers application.
*/
const testCases = ['v2/css-selectors'];
|
variable.rs
|
use crate::{
constants::*, error::*, semantic_analysis::TypedExpression, type_engine::TypeId,
type_engine::*, Ident, TypeParameter, Visibility,
};
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum VariableMutability {
// private + mutable
Mutable,
// private + immutable
Immutable,
// public + immutable
ExportedConst,
// public + mutable is invalid
}
impl Default for VariableMutability {
fn default() -> Self {
VariableMutability::Immutable
}
}
impl VariableMutability {
pub fn is_mutable(&self) -> bool {
matches!(self, VariableMutability::Mutable)
}
pub fn visibility(&self) -> Visibility {
match self {
VariableMutability::ExportedConst => Visibility::Public,
_ => Visibility::Private,
}
}
pub fn is_immutable(&self) -> bool {
!self.is_mutable()
}
}
impl From<bool> for VariableMutability {
fn from(o: bool) -> Self {
if o {
VariableMutability::Mutable
} else {
VariableMutability::Immutable
}
}
}
// as a bool, true means mutable
impl From<VariableMutability> for bool {
fn from(o: VariableMutability) -> bool {
o.is_mutable()
}
}
#[derive(Clone, Debug, Eq)]
pub struct TypedVariableDeclaration {
pub(crate) name: Ident,
pub(crate) body: TypedExpression,
pub(crate) is_mutable: VariableMutability,
pub(crate) type_ascription: TypeId,
pub(crate) const_decl_origin: bool,
}
// NOTE: Hash and PartialEq must uphold the invariant:
// k1 == k2 -> hash(k1) == hash(k2)
// https://doc.rust-lang.org/std/collections/struct.HashMap.html
impl PartialEq for TypedVariableDeclaration {
fn eq(&self, other: &Self) -> bool {
self.name == other.name
&& self.body == other.body
&& self.is_mutable == other.is_mutable
&& look_up_type_id(self.type_ascription) == look_up_type_id(other.type_ascription)
|
}
}
impl TypedVariableDeclaration {
pub(crate) fn copy_types(&mut self, type_mapping: &[(TypeParameter, TypeId)]) {
self.type_ascription =
match look_up_type_id(self.type_ascription).matches_type_parameter(type_mapping) {
Some(matching_id) => insert_type(TypeInfo::Ref(matching_id)),
None => insert_type(look_up_type_id_raw(self.type_ascription)),
};
self.body.copy_types(type_mapping)
}
}
// there are probably more names we should check here, this is the only one that will result in an
// actual issue right now, though
pub fn check_if_name_is_invalid(name: &Ident) -> CompileResult<()> {
INVALID_NAMES
.iter()
.find_map(|x| {
if *x == name.as_str() {
Some(err(
vec![],
[CompileError::InvalidVariableName { name: name.clone() }].to_vec(),
))
} else {
None
}
})
.unwrap_or_else(|| ok((), vec![], vec![]))
}
|
&& self.const_decl_origin == other.const_decl_origin
|
test_01_07.py
|
def
|
():
assert "spacy.load" in __solution__, "¿Estás llamando a spacy.load?"
assert nlp.meta["lang"] == "es", "¿Estás cargando el modelo correcto?"
assert nlp.meta["name"] == "core_news_sm", "¿Estás cargando el modelo correcto?"
assert "nlp(text)" in __solution__, "¿Procesaste el texto correctamente?"
assert (
"print(doc.text)" in __solution__
), "¿Estás imprimiendo en pantalla el texto del Doc?"
__msg__.good(
"¡Bien hecho! Ahora que practicaste cargando modelos, miremos algunas de sus predicciones."
)
|
test
|
bench_serialization.rs
|
// Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate protobuf;
use std::collections::HashMap;
use test::Bencher;
use rand::{thread_rng, Rng};
use protobuf::Message;
use kvproto::eraftpb::Entry;
use kvproto::raft_cmdpb::{CmdType, RaftCmdRequest, Request};
#[inline]
fn gen_rand_str(len: usize) -> Vec<u8> {
let mut rand_str = vec![0; len];
thread_rng().fill_bytes(&mut rand_str);
rand_str
}
#[inline]
fn generate_requests(map: &HashMap<&[u8], &[u8]>) -> Vec<Request> {
let mut reqs = vec![];
for (key, value) in map {
let mut r = Request::new();
r.set_cmd_type(CmdType::Put);
r.mut_put().set_cf("tikv".to_owned());
r.mut_put().set_key(key.to_vec());
r.mut_put().set_value(value.to_vec());
reqs.push(r);
}
reqs
}
fn encode(map: &HashMap<&[u8], &[u8]>) -> Vec<u8> {
let mut e = Entry::new();
let mut cmd = RaftCmdRequest::new();
let reqs = generate_requests(map);
cmd.set_requests(protobuf::RepeatedField::from_vec(reqs));
let cmd_msg = cmd.write_to_bytes().unwrap();
e.set_data(cmd_msg);
e.write_to_bytes().unwrap()
}
fn decode(data: &[u8])
|
#[bench]
fn bench_encode_one(b: &mut Bencher) {
let key = gen_rand_str(30);
let value = gen_rand_str(256);
let mut map: HashMap<&[u8], &[u8]> = HashMap::new();
map.insert(&key, &value);
b.iter(|| { encode(&map); });
}
#[bench]
fn bench_decode_one(b: &mut Bencher) {
let key = gen_rand_str(30);
let value = gen_rand_str(256);
let mut map: HashMap<&[u8], &[u8]> = HashMap::new();
map.insert(&key, &value);
let data = encode(&map);
b.iter(|| { decode(&data); });
}
#[bench]
fn bench_encode_two(b: &mut Bencher) {
let key_for_lock = gen_rand_str(30);
let value_for_lock = gen_rand_str(10);
let key_for_data = gen_rand_str(30);
let value_for_data = gen_rand_str(256);
let mut map: HashMap<&[u8], &[u8]> = HashMap::new();
map.insert(&key_for_lock, &value_for_lock);
map.insert(&key_for_data, &value_for_data);
b.iter(|| { encode(&map); });
}
#[bench]
fn bench_decode_two(b: &mut Bencher) {
let key_for_lock = gen_rand_str(30);
let value_for_lock = gen_rand_str(10);
let key_for_data = gen_rand_str(30);
let value_for_data = gen_rand_str(256);
let mut map: HashMap<&[u8], &[u8]> = HashMap::new();
map.insert(&key_for_lock, &value_for_lock);
map.insert(&key_for_data, &value_for_data);
let data = encode(&map);
b.iter(|| { decode(&data); });
}
|
{
let mut entry = Entry::new();
entry.merge_from_bytes(data).unwrap();
let mut cmd = RaftCmdRequest::new();
cmd.merge_from_bytes(entry.get_data()).unwrap();
}
|
traits.rs
|
use super::structs::{PaginatedQuery, Response};
use diesel::pg::Pg;
use diesel::prelude::*;
use diesel::query_builder::*;
use diesel::query_dsl::methods::LoadQuery;
use diesel::sql_types::{BigInt, HasSqlType};
pub trait Paginate: Sized {
fn page(self, page: i64) -> PaginatedQuery<Self>;
}
impl<T> Paginate for T {
fn page(self, page: i64) -> PaginatedQuery<Self> {
PaginatedQuery {
query: self,
per_page: 15,
page,
}
}
}
impl<T> QueryFragment<Pg> for PaginatedQuery<T>
where
T: QueryFragment<Pg>,
{
fn walk_ast(&self, mut out: AstPass<Pg>) -> QueryResult<()> {
out.push_sql("SELECT *, COUNT(*) OVER () FROM (");
self.query.walk_ast(out.reborrow())?;
out.push_sql(") t LIMIT ");
out.push_bind_param::<BigInt, _>(&self.per_page)?;
out.push_sql(" OFFSET ");
let offset = (self.page - 1) * self.per_page;
out.push_bind_param::<BigInt, _>(&offset)?;
Ok(())
}
}
impl<T: Query> Query for PaginatedQuery<T> {
type SqlType = (T::SqlType, BigInt);
}
impl<T> RunQueryDsl<PgConnection> for PaginatedQuery<T> {}
impl<T> PaginatedQuery<T> {
/// Set the per-page size for the query.
pub fn per_page(self, per_page: i64) -> Self {
PaginatedQuery { per_page, ..self }
}
/// Load paginated data
pub fn load_paginated<U>(self, conn: &PgConnection) -> QueryResult<Response<U>>
where
Self: LoadQuery<PgConnection, (U, i64)>,
{
let page = self.page;
let per_page = self.per_page;
let results = self.load::<(U, i64)>(conn)?;
let total = results.get(0).map(|x| x.1).unwrap_or(0);
let data = results.into_iter().map(|x| x.0).collect();
let mut last_page = 1;
if total > 0 {
last_page = (total as f64 / per_page as f64).ceil() as i64;
}
Ok(Response {
page,
per_page,
total,
last_page,
data,
})
}
|
Query + QueryId + QueryFragment<Pg> + LoadQuery<PgConnection, U>
{
fn load_paginated(
self,
conn: &PgConnection,
page: Option<i64>,
per_page: Option<i64>,
) -> QueryResult<Response<U>>;
}
impl<T, U> LoadPaginated<U> for T
where
Self: Query + QueryId + QueryFragment<Pg> + LoadQuery<PgConnection, U>,
U: Queryable<Self::SqlType, Pg>,
Pg: HasSqlType<Self::SqlType>,
{
fn load_paginated(
self,
conn: &PgConnection,
page: Option<i64>,
per_page: Option<i64>,
) -> QueryResult<Response<U>> {
let page = page.unwrap_or(1);
let mut query = self.page(page);
if let Some(per_page) = per_page {
query = query.per_page(per_page);
}
query.load_paginated::<U>(conn)
}
}
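// Illustrative usage (hypothetical `users::table` / `User` names, not part of this module):
//
//	// explicit paging through the inherent API
//	let resp: Response<User> = users::table.page(2).per_page(25).load_paginated(&conn)?;
//	// or via the blanket `LoadPaginated` impl with optional parameters
//	let resp2: Response<User> = users::table.load_paginated(&conn, Some(2), Some(25))?;
//
// Both forms wrap the query in `SELECT *, COUNT(*) OVER () FROM (...)`, so `total` and
// `last_page` are computed in the same round trip as the page of rows.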
|
}
pub trait LoadPaginated<U>:
|
books_views.py
|
from django.shortcuts import render
from books import models  # import the models module
from django.contrib.auth.decorators import login_required,permission_required
from mybooks import settings
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
import re
import os
from PIL import Image
import base64
from io import BytesIO
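# The three helpers below build the search-result view:
# - highlight() is an re.sub callback that wraps the named group 'value' of a match in a red <strong> tag,
# - abstract() keeps roughly 50 characters of context before the first highlight and after the last one,
# - getleginfo() formats a purple citation line (author, title, publisher, year, page).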
def highlight(matched):
value = matched.group('value')
return '<strong style="background:red">'+value+'</strong>'
def abstract(text):
front=text.partition('<strong style="background:red">')
rear=front[2].rpartition('</strong>')
return front[0][-50:]+'<strong style="background:red">'+rear[0]+'</strong>'+rear[2][:50]
def getleginfo(BKname,page):
return '<font color="purple">'+BKname.author+':'+'《'+BKname.bookname+'》,'+BKname.pubaddress+':'+BKname.publisher+','+BKname.year+'年,'+page+'</font>'
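# Illustration (not part of the original views): highlight() is used as a re.sub callback.
# The named group 'value' captures each match, which highlight() wraps in a <strong> tag.
# The sample string below is hypothetical.
# sample = 'a deep network with deep layers'
# marked = re.sub('(?P<value>deep)', highlight, sample)
# # -> 'a <strong style="background:red">deep</strong> network with <strong style="background:red">deep</strong> layers'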
def search(request):
if request.method == 'PO
|
uest.POST.get('key_Id')
imagetext = models.BooksImageText.objects.get(id=key_Id)
with open(imagetext.txt,'r',encoding='UTF-8') as txtfile:
textcontent=txtfile.read()
img = Image.open(imagetext.image)
output_buffer = BytesIO()
img.save(output_buffer, format='png')
byte_data = output_buffer.getvalue()
base64_data = base64.b64encode(byte_data)
BKname=models.Info.objects.get(book_id=imagetext.book_id)
copyright=getleginfo(BKname,imagetext.page)
return render(request, 'books_view.html', {'copyright':copyright,'base64_data':str(base64_data,'utf-8'),'imagetext': imagetext,'textcontent':textcontent,'before':int(key_Id)-1,'next':int(key_Id)+1})
def removeBom(file):
'''Strip the UTF-8 BOM bytes from the file, if present.'''
BOM = b'\xef\xbb\xbf'
existBom = lambda s: s == BOM
f = open(file, 'rb')
if existBom(f.read(3)):
fbody = f.read()
f.close()
with open(file, 'wb') as f:
f.write(fbody)
def edit(request):
if request.method == 'POST':
textarea = request.POST.get('textarea')
key_Id = request.POST.get('key_Id')
if textarea != None:
formerImageText=models.BooksImageText.objects.get(id=str(int(key_Id)-1))
formertext=''
#take the last 30 characters of the previous record
text=re.sub('<.+?>','',formerImageText.text)
text=text.strip().replace('\t','').replace('\r','').replace('\n','').replace(' ','').replace(' ','')
if len(text) > 30:
formertext=text[-30:]
else:
formertext=text
#strip HTML tags from both records and store the combined result in text for searching
current_text=re.sub('<.+?>','',textarea)
current_text=current_text.strip().replace('\t','').replace('\r','').replace('\n','').replace(' ','').replace(' ','')
models.BooksImageText.objects.filter(id=key_Id).update(text=formertext+current_text)
#update the underlying txt file with the newly submitted textarea content, removing stray newlines
imagetext = models.BooksImageText.objects.get(id=key_Id)
removeBom(imagetext.txt)
with open(imagetext.txt,'w',encoding='UTF-8') as txtfile:
txtfile.write('%s' % (textarea.replace('\n','')))
#no textarea posted: this is a fetch of the textarea content rather than a save
imagetext = models.BooksImageText.objects.get(id=key_Id)
if os.path.exists(imagetext.txt):
removeBom(imagetext.txt)
with open(imagetext.txt,'r',encoding='UTF-8') as txtfile:
textcontent=txtfile.read()
img = Image.open(imagetext.image)
output_buffer = BytesIO()
img.save(output_buffer, format='png')
byte_data = output_buffer.getvalue()
base64_data = base64.b64encode(byte_data)
BKname=models.Info.objects.get(book_id=imagetext.book_id)
copyright=getleginfo(BKname,imagetext.page)
return render(request, 'books_edit.html', {'copyright':copyright,'base64_data':str(base64_data,'utf-8'),'imagetext': imagetext,'textcontent':textcontent,'before':int(key_Id)-1,'next':int(key_Id)+1})
|
ST':
query_str = request.POST.get('query_str').strip()
query_str=re.sub(' +', ' ', query_str)
bookname = request.POST.get('bookname')
if query_str != '':
if ' ' in query_str:
queryregex = query_str.replace(' ', '.*')
imagetext = models.BooksImageText.objects.filter(text__regex=queryregex, book_id__bookname__contains=bookname).order_by('id')
else:
imagetext = models.BooksImageText.objects.filter(text__regex=query_str, book_id__bookname__contains=bookname).order_by('id')
#pagination
paginator = Paginator(imagetext, 20) # items per page
page = request.POST.get('page')
try:
pagesImagetext = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
pagesImagetext = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
pagesImagetext = paginator.page(paginator.num_pages)
#highlight matches and build abstracts
if ' ' in query_str:
for it in pagesImagetext:
sub_query_strs=query_str.split(' ')
for sqs in sub_query_strs:
it.text=re.sub('(?P<value>'+sqs+')', highlight, it.text)
BKname=models.Info.objects.get(book_id=it.book_id)
copyright=getleginfo(BKname,it.page)
it.text=abstract(it.text)+'<br>'+copyright
else:
for it in pagesImagetext:
BKname=models.Info.objects.get(book_id=it.book_id)
copyright=getleginfo(BKname,it.page)
it.text=abstract(re.sub('(?P<value>'+query_str+')', highlight, it.text))+'<br>'+copyright
return render(request, 'index.html', {'returninfo':'共在'+str(len(imagetext))+'个页面上找到检索信息。','imagetext': pagesImagetext,'query_str': query_str,'bookname': bookname})
else:
return render(request, 'index.html')
else:
return render(request, 'index.html')
def view(request):
if request.method == 'POST':
key_Id = req
|
sum.go
|
package mk
import (
"bufio"
"context"
"golang.org/x/mod/sumdb/storage"
"gopkg.in/yaml.v2"
"io"
"os"
"path/filepath"
"sync"
)
type YamlSumStorageFile struct {
Path string
Perm os.FileMode
}
type yamlStorageFileTransaction struct {
sum map[string]string
dirty, readOnly bool
lock sync.Mutex
}
func (y *yamlStorageFileTransaction) ReadValue(ctx context.Context, key string) (value string, err error) {
y.lock.Lock()
defer y.lock.Unlock()
return y.sum[key], nil
}
func (y *yamlStorageFileTransaction) ReadValues(ctx context.Context, keys []string) (values []string, err error) {
y.lock.Lock()
defer y.lock.Unlock()
result := make([]string, len(keys))
for i := range keys {
result[i] = y.sum[keys[i]]
}
return result, nil
}
func (y *yamlStorageFileTransaction) BufferWrites(writes []storage.Write) error {
y.lock.Lock()
defer y.lock.Unlock()
if len(writes) == 0 {
return nil
}
y.dirty = true
for i := range writes {
y.sum[writes[i].Key] = writes[i].Value
}
return nil
}
func (j *YamlSumStorageFile) read() (*yamlStorageFileTransaction, error) {
var tr yamlStorageFileTransaction
file, err := os.Open(j.Path)
if os.IsNotExist(err) {
tr.sum = make(map[string]string)
return &tr, nil
} else if err != nil {
return nil, err
}
defer func() { _ = file.Close() }()
rdr := bufio.NewReader(file)
if _, err = rdr.Peek(1); err == io.EOF {
return &tr, nil
} else if err != nil
|
if err := yaml.NewDecoder(rdr).Decode(&tr.sum); err != nil {
return nil, err
}
return &tr, nil
}
func (j *YamlSumStorageFile) ReadOnly(ctx context.Context, f func(context.Context, storage.Transaction) error) error {
tr, err := j.read()
if err != nil {
return err
}
return f(ctx, tr)
}
func (j *YamlSumStorageFile) ReadWrite(ctx context.Context, f func(context.Context, storage.Transaction) error) error {
tr, err := j.read()
if err != nil {
return err
}
if err := f(ctx, tr); err != nil {
return err
}
if ctx.Err() != nil {
return ctx.Err()
}
if !tr.dirty {
return nil
}
dir := filepath.Dir(j.Path)
if dir != "" {
_ = os.MkdirAll(dir, 0700)
}
file, err := os.OpenFile(j.Path, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, j.Perm)
if err != nil {
return err
}
if err := yaml.NewEncoder(file).Encode(tr.sum); err != nil {
_ = file.Close()
return err
}
return file.Close()
}
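// Usage sketch (not part of the original file): the file-backed store satisfies
// golang.org/x/mod/sumdb/storage.Storage, so all reads and writes go through
// transactions. The path and key names below are hypothetical placeholders.
//
// func example(ctx context.Context) error {
// 	store := &YamlSumStorageFile{Path: "sumdb.yaml", Perm: 0644}
// 	return store.ReadWrite(ctx, func(ctx context.Context, tx storage.Transaction) error {
// 		return tx.BufferWrites([]storage.Write{{Key: "example.com/mod@v1.0.0", Value: "h1:..."}})
// 	})
// }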
|
{
return nil, err
}
|
deploy_ret.pb.go
|
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: deploy_ret.proto
package easy_flow
import (
fmt "fmt"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
_ "github.com/mwitkow/go-proto-validators"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
//
//Deployment result
type DeployRet struct {
//
//Task ID
TaskId string `protobuf:"bytes,1,opt,name=taskId,proto3" json:"taskId" form:"taskId"`
//
//Application ID
AppId string `protobuf:"bytes,2,opt,name=appId,proto3" json:"appId" form:"appId"`
//
//App name
AppName string `protobuf:"bytes,3,opt,name=appName,proto3" json:"appName" form:"appName"`
//
//Operator
Operator string `protobuf:"bytes,4,opt,name=operator,proto3" json:"operator" form:"operator"`
//
//Customer ID
Org int32 `protobuf:"varint,5,opt,name=org,proto3" json:"org" form:"org"`
//
//Target list
TargetList []*TargetResult `protobuf:"bytes,6,rep,name=targetList,proto3" json:"targetList" form:"targetList"`
//
//Batch size
BatchNum int32 `protobuf:"varint,7,opt,name=batchNum,proto3" json:"batchNum" form:"batchNum"`
//
//Batch interval
BatchInterval int32 `protobuf:"varint,8,opt,name=batchInterval,proto3" json:"batchInterval" form:"batchInterval"`
//
//Whether to pause on failure
FailedStop string `protobuf:"bytes,9,opt,name=failedStop,proto3" json:"failedStop" form:"failedStop"`
//
//Status
Status string `protobuf:"bytes,10,opt,name=status,proto3" json:"status" form:"status"`
//
//Error code
Code int32 `protobuf:"varint,11,opt,name=code,proto3" json:"code" form:"code"`
//
//Usage count
UsedTime int32 `protobuf:"varint,12,opt,name=usedTime,proto3" json:"usedTime" form:"usedTime"`
//
//Start time
StartTime string `protobuf:"bytes,13,opt,name=startTime,proto3" json:"startTime" form:"startTime"`
//
//End time
EndTime string `protobuf:"bytes,14,opt,name=endTime,proto3" json:"endTime" form:"endTime"`
//
//Cluster ID
ClusterId string `protobuf:"bytes,15,opt,name=clusterId,proto3" json:"clusterId" form:"clusterId"`
//
//Config package ID
ConfigPackageId string `protobuf:"bytes,16,opt,name=configPackageId,proto3" json:"configPackageId" form:"configPackageId"`
//
//Labels
Labels *DeployLabel `protobuf:"bytes,17,opt,name=labels,proto3" json:"labels" form:"labels"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *DeployRet) Reset() { *m = DeployRet{} }
func (m *DeployRet) String() string { return proto.CompactTextString(m) }
func (*DeployRet) ProtoMessage() {}
func (*DeployRet) Descriptor() ([]byte, []int) {
return fileDescriptor_5fe57fbc8dd4a84e, []int{0}
}
func (m *DeployRet) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_DeployRet.Unmarshal(m, b)
}
func (m *DeployRet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_DeployRet.Marshal(b, m, deterministic)
}
func (m *DeployRet) XXX_Merge(src proto.Message) {
xxx_messageInfo_DeployRet.Merge(m, src)
}
func (m *DeployRet) XXX_Size() int {
return xxx_messageInfo_DeployRet.Size(m)
}
func (m *DeployRet) XXX_DiscardUnknown() {
xxx_messageInfo_DeployRet.DiscardUnknown(m)
}
var xxx_messageInfo_DeployRet proto.InternalMessageInfo
func (m *DeployRet) GetTaskId() string {
if m != nil {
return m.TaskId
}
return ""
}
func (m *DeployRet) GetAppId() string {
if m != nil {
return m.AppId
}
return ""
}
func (m *DeployRet) GetAppName() string {
if m != nil {
return m.AppName
}
return ""
}
func (m *DeployRet) GetOperator() string {
if m != nil {
return m.Operator
}
return ""
}
func (m *DeployRet) GetOrg() int32 {
if m != nil {
return m.Org
}
return 0
}
func (m *DeployRet) GetTargetList() []*TargetResult {
if m != nil {
return m.TargetList
}
return nil
}
func (m *DeployRet) GetBatchNum() int32 {
if m != nil {
return m.BatchNum
}
return 0
}
func (m *DeployRet) GetBatchInterval() int32 {
if m != nil {
return m.BatchInterval
}
return 0
}
func (m *DeployRet) GetFailedStop() string {
if m != nil {
return m.FailedStop
}
return ""
}
func (m *DeployRet) GetStatus() string {
if m != nil {
return m.Status
}
return ""
}
func (m *DeployRet) GetCode() int32 {
if m != nil {
return m.Code
}
return 0
}
func (m *DeployRet) GetUsedTime() int32 {
if m != nil {
return m.UsedTime
}
return 0
}
func (m *DeployRet) GetStartTime() string {
if m != nil {
return m.StartTime
}
return ""
}
func (m *DeployRet) GetEndTime() string {
if m != nil {
return m.EndTime
}
return ""
}
func (m *DeployRet) GetClusterId() string {
if m != nil {
return m.ClusterId
}
return ""
}
func (m *DeployRet) GetConfigPackageId() string {
if m != nil {
return m.ConfigPackageId
}
return ""
}
func (m *DeployRet) GetLabels() *DeployLabel {
if m != nil {
return m.Labels
}
return nil
}
func init() {
proto.RegisterType((*DeployRet)(nil), "easy_flow.DeployRet")
}
func init() { proto.RegisterFile("deploy_ret.proto", fileDescriptor_5fe57fbc8dd4a84e) }
var fileDescriptor_5fe57fbc8dd4a84e = []byte{
// 832 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x55, 0xdd, 0x6e, 0xdb, 0x36,
0x14, 0x9e, 0x9a, 0xe6, 0xc7, 0x74, 0x93, 0x38, 0x5c, 0xd7, 0x11, 0xb9, 0x91, 0xa7, 0x1a, 0x83,
0xbc, 0x96, 0x92, 0x7f, 0xda, 0x61, 0xf5, 0xc5, 0x8c, 0x64, 0x3f, 0x80, 0x81, 0x22, 0xd8, 0xd8,
0xa0, 0x03, 0xa2, 0x39, 0x01, 0x2d, 0xd1, 0xaa, 0x11, 0x39, 0xd4, 0x24, 0xba, 0x41, 0x9b, 0xf8,
0x01, 0xf6, 0x0c, 0x7b, 0x83, 0x01, 0x7b, 0x8b, 0x61, 0xd7, 0x7b, 0x02, 0x15, 0x18, 0xb0, 0x17,
0xd0, 0x13, 0x0c, 0x22, 0x25, 0x59, 0x0d, 0x76, 0x33, 0xa0, 0xbb, 0xe8, 0x95, 0x78, 0xf8, 0x7d,
0xdf, 0xe1, 0xf9, 0x78, 0x48, 0x0a, 0x34, 0x3c, 0x16, 0x06, 0xfc, 0xd5, 0x59, 0xc4, 0x84, 0x15,
0x46, 0x5c, 0x70, 0x58, 0x63, 0x34, 0x7e, 0x75, 0x36, 0x0d, 0xf8, 0xe5, 0x3e, 0xf6, 0x67, 0xe2,
0xc5, 0x62, 0x62, 0xb9, 0x7c, 0x6e, 0xfb, 0xdc, 0xe7, 0xb6, 0x64, 0x4c, 0x16, 0x53, 0x19, 0xc9,
0x40, 0x8e, 0x94, 0x72, 0xff, 0x07, 0x9f, 0x5b, 0x99, 0x98, 0x87, 0xb1, 0x15, 0x70, 0x97, 0x06,
0xb6, 0xcb, 0x2f, 0x44, 0x44, 0x5d, 0x11, 0x2b, 0x65, 0xc4, 0x42, 0x8e, 0xe7, 0xdc, 0x63, 0x41,
0x6c, 0xe7, 0x44, 0x5b, 0x86, 0x76, 0xb9, 0xa6, 0x2d, 0x68, 0xe4, 0x33, 0x71, 0x16, 0xb1, 0x78,
0x11, 0xe4, 0x25, 0xed, 0x3f, 0x7f, 0x17, 0x89, 0x73, 0xa3, 0x01, 0x9d, 0xb0, 0x20, 0xcf, 0xfb,
0x79, 0xc5, 0xdf, 0xfc, 0x72, 0x26, 0xce, 0xf9, 0xa5, 0xed, 0x73, 0x2c, 0x41, 0xfc, 0x92, 0x06,
0x33, 0x8f, 0x0a, 0x1e, 0xc5, 0x76, 0x39, 0x54, 0x3a, 0xe3, 0xcf, 0x3a, 0xa8, 0x7d, 0x2d, 0xd3,
0x11, 0x26, 0x60, 0x1b, 0x6c, 0x08, 0x1a, 0x9f, 0x8f, 0x3c, 0xa4, 0x35, 0x35, 0xb3, 0x76, 0xb8,
0x97, 0x26, 0xfa, 0xf6, 0x94, 0x47, 0xf3, 0x81, 0xa1, 0xe6, 0x0d, 0x92, 0x13, 0xe0, 0x00, 0xac,
0xd3, 0x30, 0x1c, 0x79, 0xe8, 0x96, 0x64, 0xb6, 0xd2, 0x44, 0xbf, 0xa3, 0x98, 0x72, 0xda, 0xf8,
0xeb, 0x8d, 0xde, 0x00, 0x3b, 0xa7, 0x4e, 0x07, 0x3f, 0xa1, 0xf8, 0xf5, 0xf8, 0xaa, 0xdb, 0x5f,
0xb6, 0x88, 0x92, 0xc0, 0x87, 0x60, 0x93, 0x86, 0xe1, 0x11, 0x9d, 0x33, 0xb4, 0x26, 0xd5, 0x30,
0x4d, 0xf4, 0x9d, 0x52, 0x9d, 0x01, 0x06, 0x29, 0x28, 0xf0, 0x39, 0xd8, 0xe2, 0x21, 0x8b, 0xb2,
0xa2, 0xd1, 0x6d, 0x49, 0x1f, 0xa4, 0x89, 0xbe, 0xab, 0xe8, 0x05, 0x92, 0xad, 0x77, 0x1f, 0x7c,
0x72, 0xea, 0x50, 0xfc, 0xfa, 0x00, 0x9f, 0x74, 0xf0, 0x93, 0xb1, 0x63, 0x95, 0xe3, 0x33, 0x3c,
0xbe, 0xea, 0x3d, 0xec, 0x77, 0x97, 0x2d, 0x52, 0xe6, 0x82, 0x9f, 0x82, 0x35, 0x1e, 0xf9, 0x68,
0xbd, 0xa9, 0x99, 0xeb, 0x87, 0x77, 0xd3, 0x44, 0x07, 0x79, 0xca, 0xc8, 0xcf, 0xb2, 0xdd, 0x6a,
0x7c, 0x40, 0x32, 0x02, 0x3c, 0x02, 0x40, 0x75, 0xf2, 0xe9, 0x2c, 0x16, 0x68, 0xa3, 0xb9, 0x66,
0xd6, 0x7b, 0x1f, 0x5b, 0x65, 0x37, 0xac, 0x63, 0x09, 0x12, 0xd9, 0xe5, 0xc3, 0x8f, 0xd2, 0x44,
0xdf, 0x2b, 0x76, 0xac, 0x10, 0x19, 0xa4, 0x92, 0x01, 0xda, 0x60, 0x6b, 0x42, 0x85, 0xfb, 0xe2,
0x68, 0x31, 0x47, 0x9b, 0x72, 0xf1, 0x0f, 0x57, 0x7e, 0x0a, 0xc4, 0x20, 0x25, 0x09, 0x7e, 0x09,
0xb6, 0xe5, 0x78, 0x74, 0x21, 0x58, 0xf4, 0x92, 0x06, 0x68, 0x4b, 0xaa, 0x50, 0x9a, 0xe8, 0x77,
0x2b, 0xaa, 0x02, 0x36, 0xc8, 0xdb, 0x74, 0xf8, 0x18, 0x80, 0x29, 0x9d, 0x05, 0xcc, 0x7b, 0x26,
0x78, 0x88, 0x6a, 0x72, 0x0b, 0x2b, 0x75, 0xae, 0x30, 0x83, 0x54, 0x88, 0xd9, 0x61, 0x88, 0x05,
0x15, 0x8b, 0x18, 0x81, 0x9b, 0x87, 0x41, 0xcd, 0x1b, 0x24, 0x27, 0xc0, 0xfb, 0xe0, 0xb6, 0xcb,
0x3d, 0x86, 0xea, 0xb2, 0xb0, 0xdd, 0x34, 0xd1, 0xeb, 0x8a, 0x98, 0xcd, 0x1a, 0x44, 0x82, 0x99,
0xef, 0x45, 0xcc, 0xbc, 0xe3, 0xd9, 0x9c, 0xa1, 0x3b, 0x37, 0x7d, 0x17, 0x88, 0x41, 0x4a, 0x12,
0xfc, 0x5b, 0x03, 0xb5, 0x58, 0xd0, 0x48, 0x48, 0xc9, 0xb6, 0x2c, 0xe2, 0x0f, 0x2d, 0x4d, 0xf4,
0x46, 0x59, 0x85, 0xc2, 0xb2, 0x76, 0xfd, 0xa6, 0x81, 0x5f, 0xb5, 0x53, 0xd3, 0x1c, 0x0e, 0x9c,
0x6e, 0xd6, 0xfc, 0xec, 0x04, 0x7c, 0xd6, 0x1e, 0xca, 0xef, 0xd5, 0xa3, 0x65, 0x1b, 0x9b, 0x5d,
0xa7, 0x83, 0x7b, 0xe3, 0xeb, 0x8e, 0xc4, 0xdb, 0xd8, 0xec, 0x3b, 0x1d, 0xdc, 0x2d, 0xe2, 0x6b,
0xa7, 0x8b, 0x7b, 0x4a, 0xd5, 0x76, 0x8e, 0x9b, 0x63, 0xb3, 0xe7, 0x74, 0x70, 0x7f, 0x7c, 0x2d,
0x39, 0x6a, 0x7a, 0x60, 0x3a, 0x1d, 0xfc, 0xb8, 0x08, 0x56, 0x63, 0xf3, 0x47, 0x4b, 0x7e, 0x1f,
0xb4, 0x87, 0xe6, 0xc9, 0xb5, 0xf3, 0x00, 0x8f, 0xcd, 0xe1, 0xe0, 0x5f, 0xe4, 0x15, 0xf5, 0xb0,
0x45, 0x56, 0xce, 0xe0, 0x1b, 0x0d, 0x6c, 0xb2, 0x0b, 0xb5, 0x31, 0x3b, 0xd2, 0xe5, 0xef, 0xda,
0xea, 0x42, 0xe4, 0xc8, 0xfb, 0xe8, 0xb1, 0x70, 0x05, 0x7f, 0xd6, 0x40, 0xcd, 0x0d, 0x16, 0xb1,
0x60, 0xd1, 0xc8, 0x43, 0xbb, 0xd2, 0xe3, 0xf9, 0xaa, 0x91, 0x25, 0x94, 0x99, 0x7c, 0x06, 0xbe,
0xcf, 0x6e, 0xf1, 0xf4, 0x00, 0x7f, 0x2b, 0x7d, 0x7d, 0xb1, 0xc4, 0xc3, 0x6a, 0xfc, 0xe8, 0x3f,
|
0xff, 0x8e, 0xba, 0xe7, 0xd4, 0x67, 0x23, 0x0f, 0x35, 0x64, 0x45, 0x3f, 0xa5, 0x89, 0x7e, 0xaf,
0x38, 0xb7, 0x6f, 0x11, 0xfe, 0xb7, 0xba, 0x6e, 0x56, 0x02, 0x0f, 0xc0, 0x86, 0x7c, 0xd6, 0x63,
0xb4, 0xd7, 0xd4, 0xcc, 0x7a, 0xef, 0x5e, 0xe5, 0xa1, 0x51, 0xef, 0xf4, 0xd3, 0x0c, 0xae, 0x5e,
0x46, 0xc5, 0x37, 0x48, 0x2e, 0x3c, 0xfc, 0xe6, 0xe4, 0xab, 0x77, 0xf0, 0x93, 0x99, 0x6c, 0x48,
0x66, 0xff, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0d, 0x8b, 0xf7, 0xfd, 0x57, 0x07, 0x00, 0x00,
}
|
0xc6, 0xdd, 0xde, 0xb2, 0x45, 0x56, 0xab, 0xc3, 0x5f, 0x34, 0xb0, 0xeb, 0xf2, 0x8b, 0xe9, 0xcc,
|
GpuWavefront.py
|
# Copyright (c) 2017-2021 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from m5.params import *
from m5.proxy import *
from m5.objects.TesterThread import TesterThread
class GpuWavefront(TesterThread):
|
type = 'GpuWavefront'
cxx_header = "cpu/testers/gpu_ruby_test/gpu_wavefront.hh"
cxx_class = 'gem5::GpuWavefront'
cu_id = Param.Int("Compute Unit ID")
|
|
app.module.ts
|
import * as Joi from '@hapi/joi';
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';
import { AuthenticationModule } from './authentication/authentication.module';
import { CategoriesModule } from './categories/categories.module';
import { DatabaseModule } from './database/database.module';
import { PostsModule } from './posts/posts.module';
import { UsersModule } from './users/users.module';
@Module({
imports: [
ConfigModule.forRoot({
validationSchema: Joi.object({
POSTGRES_HOST: Joi.string().required(),
POSTGRES_PORT: Joi.number().required(),
POSTGRES_USER: Joi.string().required(),
POSTGRES_PASSWORD: Joi.string().required(),
POSTGRES_DB: Joi.string().required(),
|
JWT_EXPIRATION_TIME: Joi.string().required()
})
}),
DatabaseModule,
UsersModule,
AuthenticationModule,
PostsModule,
CategoriesModule
],
controllers: [],
providers: []
})
export class AppModule {}
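// Illustrative only (not from the original project): a .env file matching the
// validation schema above might look like this; all values are placeholders.
//
// POSTGRES_HOST=localhost
// POSTGRES_PORT=5432
// POSTGRES_USER=postgres
// POSTGRES_PASSWORD=secret
// POSTGRES_DB=api
// PORT=3000
// JWT_SECRET=change-me
// JWT_EXPIRATION_TIME=3600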
|
PORT: Joi.number(),
JWT_SECRET: Joi.string().required(),
|
util.rs
|
use serde::Serialize;
use serde_json;
use std::process::exit;
use std::io::Write;
use std::fs::OpenOptions;
pub fn write_json_to_file<T:Serialize>(filename: &str,content: &T,err_prompt: &str)
|
{
let mut file = OpenOptions::new()
.write(true)
.create(true)
.open(filename)
.unwrap_or_else(|err| { writeln!(std::io::stderr(), "{},{}",err, err_prompt).ok(); exit(-1);});
let output = serde_json::to_string(content)
.unwrap_or_else(|err| { writeln!(std::io::stderr(), "{},{}",err, err_prompt).ok(); exit(-1);});
file
.write_all(&output.into_bytes())
.unwrap_or_else(|err| { writeln!(std::io::stderr(), "{},{}",err, err_prompt).ok(); exit(-1);});
}
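// Usage sketch (not part of the original module): any type implementing Serialize
// can be written; the struct, file name, and error prompt below are hypothetical.
//
// #[derive(Serialize)]
// struct Report { passed: u32, failed: u32 }
//
// fn save_report(report: &Report) {
//     write_json_to_file("report.json", report, "failed to write report.json");
// }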
|
|
nats_connection_handler.go
|
package events
import (
"errors"
"github.com/nats-io/nats.go"
logger "github.com/sirupsen/logrus"
"sort"
"sync"
)
type NatsConnectionHandler struct {
NatsConnection *nats.Conn
Subscriptions []*nats.Subscription
topics []string
natsURL string
MessageHandler func(m *nats.Msg)
mux sync.Mutex
}
func NewNatsConnectionHandler(natsURL string) *NatsConnectionHandler {
nch := &NatsConnectionHandler{
natsURL: natsURL,
}
return nch
}
func (nch *NatsConnectionHandler) RemoveAllSubscriptions() {
for _, sub := range nch.Subscriptions {
// Unsubscribe
_ = sub.Unsubscribe()
logger.Infof("Unsubscribed from NATS topic: %s", sub.Subject)
}
nch.Subscriptions = nch.Subscriptions[:0]
}
// SubscribeToTopics expresses interest in the given subjects on the NATS message broker.
// Note that when you pass in subjects via the topics parameter, the NatsConnectionHandler will
// try to subscribe to these topics. If you don't pass any subjects via the topics parameter,
// the NatsConnectionHandler keeps the topics it is already subscribed to.
func (nch *NatsConnectionHandler) SubscribeToTopics(topics []string) error {
nch.mux.Lock()
defer nch.mux.Unlock()
if nch.natsURL == "" {
return errors.New("no PubSub URL defined")
}
if nch.NatsConnection == nil || !nch.NatsConnection.IsConnected() {
var err error
nch.RemoveAllSubscriptions()
nch.NatsConnection.Close()
logger.Infof("Connecting to NATS server at %s ...", nch.natsURL)
nch.NatsConnection, err = nats.Connect(nch.natsURL)
if err != nil {
return errors.New("failed to create NATS connection: " + err.Error())
}
logger.Info("Connected to NATS server")
}
if len(topics) > 0 && !IsEqual(nch.topics, topics) {
nch.RemoveAllSubscriptions()
nch.topics = topics
for _, topic := range nch.topics {
logger.Infof("Subscribing to topic %s ...", topic)
sub, err := nch.NatsConnection.Subscribe(topic, nch.MessageHandler)
if err != nil {
return errors.New("failed to subscribe to topic: " + err.Error())
}
logger.Infof("Subscribed to topic %s", topic)
nch.Subscriptions = append(nch.Subscriptions, sub)
}
}
return nil
}
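// Usage sketch (not part of the original file): wire up a handler and subscribe.
// The NATS URL and topic name below are placeholders, not values from the source.
//
// func example() error {
// 	nch := NewNatsConnectionHandler("nats://localhost:4222")
// 	nch.MessageHandler = func(m *nats.Msg) { logger.Infof("received message on %s", m.Subject) }
// 	return nch.SubscribeToTopics([]string{"events.>"})
// }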
func IsEqual(a1 []string, a2 []string) bool {
sort.Strings(a1)
sort.Strings(a2)
if len(a1) == len(a2)
|
else {
return false
}
return true
}
|
{
for i, v := range a1 {
if v != a2[i] {
return false
}
}
}
|
dht.go
|
package httpapi
import (
"context"
"encoding/json"
"github.com/ipfs/interface-go-ipfs-core"
caopts "github.com/ipfs/interface-go-ipfs-core/options"
"github.com/libp2p/go-libp2p-peer"
"github.com/libp2p/go-libp2p-peerstore"
notif "github.com/libp2p/go-libp2p-routing/notifications"
)
type DhtAPI HttpApi
func (api *DhtAPI) FindPeer(ctx context.Context, p peer.ID) (peerstore.PeerInfo, error) {
var out struct {
Type notif.QueryEventType
Responses []peerstore.PeerInfo
}
resp, err := api.core().Request("dht/findpeer", p.Pretty()).Send(ctx)
if err != nil {
return peerstore.PeerInfo{}, err
}
if resp.Error != nil {
return peerstore.PeerInfo{}, resp.Error
}
defer resp.Close()
dec := json.NewDecoder(resp.Output)
for {
if err := dec.Decode(&out); err != nil {
return peerstore.PeerInfo{}, err
}
if out.Type == notif.FinalPeer {
return out.Responses[0], nil
}
}
}
func (api *DhtAPI) FindProviders(ctx context.Context, p iface.Path, opts ...caopts.DhtFindProvidersOption) (<-chan peerstore.PeerInfo, error) {
options, err := caopts.DhtFindProvidersOptions(opts...)
if err != nil {
return nil, err
}
rp, err := api.core().ResolvePath(ctx, p)
if err != nil {
return nil, err
}
resp, err := api.core().Request("dht/findprovs", rp.Cid().String()).
Option("num-providers", options.NumProviders).
Send(ctx)
|
return nil, err
}
if resp.Error != nil {
return nil, resp.Error
}
res := make(chan peerstore.PeerInfo)
go func() {
defer resp.Close()
defer close(res)
dec := json.NewDecoder(resp.Output)
for {
var out struct {
Extra string
Type notif.QueryEventType
Responses []peerstore.PeerInfo
}
if err := dec.Decode(&out); err != nil {
return // todo: handle this somehow
}
if out.Type == notif.QueryError {
return // usually a 'not found' error
// todo: handle other errors
}
if out.Type == notif.Provider {
for _, pi := range out.Responses {
select {
case res <- pi:
case <-ctx.Done():
return
}
}
}
}
}()
return res, nil
}
func (api *DhtAPI) Provide(ctx context.Context, p iface.Path, opts ...caopts.DhtProvideOption) error {
options, err := caopts.DhtProvideOptions(opts...)
if err != nil {
return err
}
rp, err := api.core().ResolvePath(ctx, p)
if err != nil {
return err
}
return api.core().Request("dht/provide", rp.Cid().String()).
Option("recursive", options.Recursive).
Exec(ctx, nil)
}
func (api *DhtAPI) core() *HttpApi {
return (*HttpApi)(api)
}
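// Usage sketch (not part of the original file): stream provider records for a path
// until the channel closes or the context is cancelled. The variable names are
// placeholders only.
//
// func example(ctx context.Context, api *DhtAPI, p iface.Path) error {
// 	provs, err := api.FindProviders(ctx, p)
// 	if err != nil {
// 		return err
// 	}
// 	for pi := range provs {
// 		_ = pi // inspect each peerstore.PeerInfo here
// 	}
// 	return nil
// }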
|
if err != nil {
|
app.module.ts
|
import { BrowserModule } from '@angular/platform-browser';
import { APP_INITIALIZER, NgModule } from '@angular/core';
import { AppComponent } from './app.component';
import { AngularMaterialModule } from './angular-material.module';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { StellarObjectComponent } from './planet/stellar-object.component';
import { HttpClientModule } from '@angular/common/http';
import { AppConfig } from 'src/config/app-config';
@NgModule({
declarations: [AppComponent, StellarObjectComponent],
imports: [
BrowserModule,
AngularMaterialModule,
BrowserAnimationsModule,
HttpClientModule,
],
providers: [
AppConfig,
{
provide: APP_INITIALIZER,
useFactory: initializeAppConfig,
deps: [AppConfig],
multi: true,
},
],
bootstrap: [AppComponent],
})
export class AppModule {}
|
}
|
export function initializeAppConfig(appConfig: AppConfig): () => Promise<void> {
return () => appConfig.loadEnvironmentFile();
|
containeranalysis-gen.go
|
// Copyright 2020 Google LLC.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Code generated file. DO NOT EDIT.
// Package containeranalysis provides access to the Container Analysis API.
//
// For product documentation, see: https://cloud.google.com/container-analysis/api/reference/rest/
//
// Creating a client
//
// Usage example:
//
// import "google.golang.org/api/containeranalysis/v1alpha1"
// ...
// ctx := context.Background()
// containeranalysisService, err := containeranalysis.NewService(ctx)
//
// In this example, Google Application Default Credentials are used for authentication.
//
// For information on how to create and obtain Application Default Credentials, see https://developers.google.com/identity/protocols/application-default-credentials.
//
// Other authentication options
//
// To use an API key for authentication (note: some APIs do not support API keys), use option.WithAPIKey:
//
// containeranalysisService, err := containeranalysis.NewService(ctx, option.WithAPIKey("AIza..."))
//
// To use an OAuth token (e.g., a user token obtained via a three-legged OAuth flow), use option.WithTokenSource:
//
// config := &oauth2.Config{...}
// // ...
// token, err := config.Exchange(ctx, ...)
// containeranalysisService, err := containeranalysis.NewService(ctx, option.WithTokenSource(config.TokenSource(ctx, token)))
//
// See https://godoc.org/google.golang.org/api/option/ for details on options.
package containeranalysis // import "google.golang.org/api/containeranalysis/v1alpha1"
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"strings"
googleapi "google.golang.org/api/googleapi"
gensupport "google.golang.org/api/internal/gensupport"
option "google.golang.org/api/option"
internaloption "google.golang.org/api/option/internaloption"
htransport "google.golang.org/api/transport/http"
)
// Always reference these packages, just in case the auto-generated code
// below doesn't.
var _ = bytes.NewBuffer
var _ = strconv.Itoa
var _ = fmt.Sprintf
var _ = json.NewDecoder
var _ = io.Copy
var _ = url.Parse
var _ = gensupport.MarshalJSON
var _ = googleapi.Version
var _ = errors.New
var _ = strings.Replace
var _ = context.Canceled
var _ = internaloption.WithDefaultEndpoint
const apiId = "containeranalysis:v1alpha1"
const apiName = "containeranalysis"
const apiVersion = "v1alpha1"
const basePath = "https://containeranalysis.googleapis.com/"
const mtlsBasePath = "https://containeranalysis.mtls.googleapis.com/"
// OAuth2 scopes used by this API.
const (
// View and manage your data across Google Cloud Platform services
CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
)
// NewService creates a new Service.
func NewService(ctx context.Context, opts ...option.ClientOption) (*Service, error) {
scopesOption := option.WithScopes(
"https://www.googleapis.com/auth/cloud-platform",
)
// NOTE: prepend, so we don't override user-specified scopes.
opts = append([]option.ClientOption{scopesOption}, opts...)
opts = append(opts, internaloption.WithDefaultEndpoint(basePath))
opts = append(opts, internaloption.WithDefaultMTLSEndpoint(mtlsBasePath))
client, endpoint, err := htransport.NewClient(ctx, opts...)
if err != nil {
return nil, err
}
s, err := New(client)
if err != nil {
return nil, err
}
if endpoint != "" {
s.BasePath = endpoint
}
return s, nil
}
// New creates a new Service. It uses the provided http.Client for requests.
//
// Deprecated: please use NewService instead.
// To provide a custom HTTP client, use option.WithHTTPClient.
// If you are using google.golang.org/api/googleapis/transport.APIKey, use option.WithAPIKey with NewService instead.
func New(client *http.Client) (*Service, error) {
if client == nil {
return nil, errors.New("client is nil")
}
s := &Service{client: client, BasePath: basePath}
s.Projects = NewProjectsService(s)
s.Providers = NewProvidersService(s)
return s, nil
}
type Service struct {
client *http.Client
BasePath string // API endpoint base URL
UserAgent string // optional additional User-Agent fragment
Projects *ProjectsService
Providers *ProvidersService
}
func (s *Service) userAgent() string {
if s.UserAgent == "" {
return googleapi.UserAgent
}
return googleapi.UserAgent + " " + s.UserAgent
}
func NewProjectsService(s *Service) *ProjectsService {
rs := &ProjectsService{s: s}
rs.Notes = NewProjectsNotesService(s)
rs.Occurrences = NewProjectsOccurrencesService(s)
rs.Operations = NewProjectsOperationsService(s)
rs.ScanConfigs = NewProjectsScanConfigsService(s)
return rs
}
type ProjectsService struct {
s *Service
Notes *ProjectsNotesService
Occurrences *ProjectsOccurrencesService
Operations *ProjectsOperationsService
ScanConfigs *ProjectsScanConfigsService
}
func NewProjectsNotesService(s *Service) *ProjectsNotesService {
rs := &ProjectsNotesService{s: s}
rs.Occurrences = NewProjectsNotesOccurrencesService(s)
return rs
}
type ProjectsNotesService struct {
s *Service
Occurrences *ProjectsNotesOccurrencesService
}
func NewProjectsNotesOccurrencesService(s *Service) *ProjectsNotesOccurrencesService {
rs := &ProjectsNotesOccurrencesService{s: s}
return rs
}
type ProjectsNotesOccurrencesService struct {
s *Service
}
func NewProjectsOccurrencesService(s *Service) *ProjectsOccurrencesService {
rs := &ProjectsOccurrencesService{s: s}
return rs
}
type ProjectsOccurrencesService struct {
s *Service
}
func NewProjectsOperationsService(s *Service) *ProjectsOperationsService {
rs := &ProjectsOperationsService{s: s}
return rs
}
type ProjectsOperationsService struct {
s *Service
}
func NewProjectsScanConfigsService(s *Service) *ProjectsScanConfigsService {
rs := &ProjectsScanConfigsService{s: s}
return rs
}
type ProjectsScanConfigsService struct {
s *Service
}
func NewProvidersService(s *Service) *ProvidersService {
rs := &ProvidersService{s: s}
rs.Notes = NewProvidersNotesService(s)
return rs
}
type ProvidersService struct {
s *Service
Notes *ProvidersNotesService
}
func NewProvidersNotesService(s *Service) *ProvidersNotesService {
rs := &ProvidersNotesService{s: s}
rs.Occurrences = NewProvidersNotesOccurrencesService(s)
return rs
}
type ProvidersNotesService struct {
s *Service
Occurrences *ProvidersNotesOccurrencesService
}
func NewProvidersNotesOccurrencesService(s *Service) *ProvidersNotesOccurrencesService {
rs := &ProvidersNotesOccurrencesService{s: s}
return rs
}
type ProvidersNotesOccurrencesService struct {
s *Service
}
// Artifact: Artifact describes a build product.
type Artifact struct {
// Checksum: Hash or checksum value of a binary, or Docker Registry 2.0
// digest of a container.
Checksum string `json:"checksum,omitempty"`
// Id: Artifact ID, if any; for container images, this will be a URL by
// digest like gcr.io/projectID/imagename@sha256:123456
Id string `json:"id,omitempty"`
// Name: Name of the artifact. This may be the path to a binary or jar
// file, or in the case of a container build, the name used to push the
// container image to Google Container Registry, as presented to `docker
// push`. This field is deprecated in favor of the plural `names` field;
// it continues to exist here to allow existing BuildProvenance
// serialized to json in
// google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_byt
// es to deserialize back into proto.
Name string `json:"name,omitempty"`
// Names: Related artifact names. This may be the path to a binary or
// jar file, or in the case of a container build, the name used to push
// the container image to Google Container Registry, as presented to
// `docker push`. Note that a single Artifact ID can have multiple
// names, for example if two tags are applied to one image.
Names []string `json:"names,omitempty"`
// ForceSendFields is a list of field names (e.g. "Checksum") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Checksum") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Artifact) MarshalJSON() ([]byte, error) {
type NoMethod Artifact
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Attestation: Occurrence that represents a single "attestation". The
// authenticity of an Attestation can be verified using the attached
// signature. If the verifier trusts the public key of the signer, then
// verifying the signature is sufficient to establish trust. In this
// circumstance, the AttestationAuthority to which this Attestation is
// attached is primarily useful for look-up (how to find this
// Attestation if you already know the Authority and artifact to be
// verified) and intent (which authority was this attestation intended
// to sign for).
type Attestation struct {
PgpSignedAttestation *PgpSignedAttestation `json:"pgpSignedAttestation,omitempty"`
// ForceSendFields is a list of field names (e.g.
// "PgpSignedAttestation") to unconditionally include in API requests.
// By default, fields with empty values are omitted from API requests.
// However, any non-pointer, non-interface field appearing in
// ForceSendFields will be sent to the server regardless of whether the
// field is empty or not. This may be used to include empty fields in
// Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "PgpSignedAttestation") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *Attestation) MarshalJSON() ([]byte, error) {
type NoMethod Attestation
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// AttestationAuthority: Note kind that represents a logical attestation
// "role" or "authority". For example, an organization might have one
// `AttestationAuthority` for "QA" and one for "build". This Note is
// intended to act strictly as a grouping mechanism for the attached
// Occurrences (Attestations). This grouping mechanism also provides a
// security boundary, since IAM ACLs gate the ability for a principal to
// attach an Occurrence to a given Note. It also provides a single point
// of lookup to find all attached Attestation Occurrences, even if they
// don't all live in the same project.
type AttestationAuthority struct {
Hint *AttestationAuthorityHint `json:"hint,omitempty"`
// ForceSendFields is a list of field names (e.g. "Hint") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Hint") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *AttestationAuthority) MarshalJSON() ([]byte, error) {
type NoMethod AttestationAuthority
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// AttestationAuthorityHint: This submessage provides human-readable
// hints about the purpose of the AttestationAuthority. Because the name
// of a Note acts as its resource reference, it is important to
// disambiguate the canonical name of the Note (which might be a UUID
// for security purposes) from "readable" names more suitable for debug
// output. Note that these hints should NOT be used to look up
// AttestationAuthorities in security sensitive contexts, such as when
// looking up Attestations to verify.
type AttestationAuthorityHint struct {
// HumanReadableName: The human readable name of this Attestation
// Authority, for example "qa".
HumanReadableName string `json:"humanReadableName,omitempty"`
// ForceSendFields is a list of field names (e.g. "HumanReadableName")
// to unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "HumanReadableName") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *AttestationAuthorityHint) MarshalJSON() ([]byte, error) {
type NoMethod AttestationAuthorityHint
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Basis: Basis describes the base image portion (Note) of the
// DockerImage relationship. Linked occurrences are derived from this or
// an equivalent image via: FROM Or an equivalent reference, e.g. a tag
// of the resource_url.
type Basis struct {
// Fingerprint: The fingerprint of the base image.
Fingerprint *Fingerprint `json:"fingerprint,omitempty"`
// ResourceUrl: The resource_url for the resource representing the basis
// of associated occurrence images.
ResourceUrl string `json:"resourceUrl,omitempty"`
// ForceSendFields is a list of field names (e.g. "Fingerprint") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Fingerprint") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Basis) MarshalJSON() ([]byte, error) {
type NoMethod Basis
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Binding: Associates `members` with a `role`.
type Binding struct {
// Condition: The condition that is associated with this binding. If the
// condition evaluates to `true`, then this binding applies to the
// current request. If the condition evaluates to `false`, then this
// binding does not apply to the current request. However, a different
// role binding might grant the same role to one or more of the members
// in this binding. To learn which resources support conditions in their
// IAM policies, see the [IAM
// documentation](https://cloud.google.com/iam/help/conditions/resource-p
// olicies).
Condition *Expr `json:"condition,omitempty"`
// Members: Specifies the identities requesting access for a Cloud
// Platform resource. `members` can have the following values: *
// `allUsers`: A special identifier that represents anyone who is on the
// internet; with or without a Google account. *
// `allAuthenticatedUsers`: A special identifier that represents anyone
// who is authenticated with a Google account or a service account. *
// `user:{emailid}`: An email address that represents a specific Google
// account. For example, `[email protected]` . *
// `serviceAccount:{emailid}`: An email address that represents a
// service account. For example,
// `[email protected]`. * `group:{emailid}`: An
// email address that represents a Google group. For example,
// `[email protected]`. * `deleted:user:{emailid}?uid={uniqueid}`: An
// email address (plus unique identifier) representing a user that has
// been recently deleted. For example,
// `[email protected]?uid=123456789012345678901`. If the user is
// recovered, this value reverts to `user:{emailid}` and the recovered
// user retains the role in the binding. *
// `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address
// (plus unique identifier) representing a service account that has been
// recently deleted. For example,
// `[email protected]?uid=123456789012345678901`.
// If the service account is undeleted, this value reverts to
// `serviceAccount:{emailid}` and the undeleted service account retains
// the role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`:
// An email address (plus unique identifier) representing a Google group
// that has been recently deleted. For example,
// `[email protected]?uid=123456789012345678901`. If the group is
// recovered, this value reverts to `group:{emailid}` and the recovered
// group retains the role in the binding. * `domain:{domain}`: The G
// Suite domain (primary) that represents all the users of that domain.
// For example, `google.com` or `example.com`.
Members []string `json:"members,omitempty"`
// Role: Role that is assigned to `members`. For example,
// `roles/viewer`, `roles/editor`, or `roles/owner`.
Role string `json:"role,omitempty"`
// ForceSendFields is a list of field names (e.g. "Condition") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Condition") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Binding) MarshalJSON() ([]byte, error) {
type NoMethod Binding
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// BuildDetails: Message encapsulating build provenance details.
type BuildDetails struct {
// Provenance: The actual provenance
Provenance *BuildProvenance `json:"provenance,omitempty"`
// ProvenanceBytes: Serialized JSON representation of the provenance,
// used in generating the `BuildSignature` in the corresponding Result.
// After verifying the signature, `provenance_bytes` can be unmarshalled
// and compared to the provenance to confirm that it is unchanged. A
// base64-encoded string representation of the provenance bytes is used
// for the signature in order to interoperate with openssl which expects
// this format for signature verification. The serialized form is
// captured both to avoid ambiguity in how the provenance is marshalled
// to json as well to prevent incompatibilities with future changes.
ProvenanceBytes string `json:"provenanceBytes,omitempty"`
// ForceSendFields is a list of field names (e.g. "Provenance") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Provenance") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *BuildDetails) MarshalJSON() ([]byte, error) {
type NoMethod BuildDetails
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// BuildProvenance: Provenance of a build. Contains all information
// needed to verify the full details about the build from source to
// completion.
type BuildProvenance struct {
// BuildOptions: Special options applied to this build. This is a
// catch-all field where build providers can enter any desired
// additional details.
BuildOptions map[string]string `json:"buildOptions,omitempty"`
// BuilderVersion: Version string of the builder at the time this build
// was executed.
BuilderVersion string `json:"builderVersion,omitempty"`
// BuiltArtifacts: Output of the build.
BuiltArtifacts []*Artifact `json:"builtArtifacts,omitempty"`
// Commands: Commands requested by the build.
Commands []*Command `json:"commands,omitempty"`
// CreateTime: Time at which the build was created.
CreateTime string `json:"createTime,omitempty"`
// Creator: E-mail address of the user who initiated this build. Note
// that this was the user's e-mail address at the time the build was
// initiated; this address may not represent the same end-user for all
// time.
Creator string `json:"creator,omitempty"`
// FinishTime: Time at which execution of the build was finished.
FinishTime string `json:"finishTime,omitempty"`
// Id: Unique identifier of the build.
Id string `json:"id,omitempty"`
// LogsBucket: Google Cloud Storage bucket where logs were written.
LogsBucket string `json:"logsBucket,omitempty"`
// ProjectId: ID of the project.
ProjectId string `json:"projectId,omitempty"`
// SourceProvenance: Details of the Source input to the build.
SourceProvenance *Source `json:"sourceProvenance,omitempty"`
// StartTime: Time at which execution of the build was started.
StartTime string `json:"startTime,omitempty"`
// TriggerId: Trigger identifier if the build was triggered
// automatically; empty if not.
TriggerId string `json:"triggerId,omitempty"`
// ForceSendFields is a list of field names (e.g. "BuildOptions") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BuildOptions") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *BuildProvenance) MarshalJSON() ([]byte, error) {
type NoMethod BuildProvenance
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// BuildSignature: Message encapsulating the signature of the verified
// build.
type BuildSignature struct {
// KeyId: An Id for the key used to sign. This could be either an Id for
// the key stored in `public_key` (such as the Id or fingerprint for a
// PGP key, or the CN for a cert), or a reference to an external key
// (such as a reference to a key in Cloud Key Management Service).
KeyId string `json:"keyId,omitempty"`
// KeyType: The type of the key, either stored in `public_key` or
// referenced in `key_id`
//
// Possible values:
// "KEY_TYPE_UNSPECIFIED" - `KeyType` is not set.
// "PGP_ASCII_ARMORED" - `PGP ASCII Armored` public key.
// "PKIX_PEM" - `PKIX PEM` public key.
KeyType string `json:"keyType,omitempty"`
// PublicKey: Public key of the builder which can be used to verify that
// the related findings are valid and unchanged. If `key_type` is empty,
// this defaults to PEM encoded public keys. This field may be empty if
// `key_id` references an external key. For Cloud Build based
// signatures, this is a PEM encoded public key. To verify the Cloud
// Build signature, place the contents of this field into a file
// (public.pem). The signature field is base64-decoded into its binary
// representation in signature.bin, and the provenance bytes from
// `BuildDetails` are base64-decoded into a binary representation in
// signed.bin. OpenSSL can then verify the signature: `openssl sha256
// -verify public.pem -signature signature.bin signed.bin`
PublicKey string `json:"publicKey,omitempty"`
// Signature: Signature of the related `BuildProvenance`, encoded in a
// base64 string.
Signature string `json:"signature,omitempty"`
// ForceSendFields is a list of field names (e.g. "KeyId") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "KeyId") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *BuildSignature) MarshalJSON() ([]byte, error) {
type NoMethod BuildSignature
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// BuildType: Note holding the version of the provider's builder and the
// signature of the provenance message in linked BuildDetails.
type BuildType struct {
// BuilderVersion: Version of the builder which produced this Note.
BuilderVersion string `json:"builderVersion,omitempty"`
// Signature: Signature of the build in Occurrences pointing to the Note
// containing this `BuilderDetails`.
Signature *BuildSignature `json:"signature,omitempty"`
// ForceSendFields is a list of field names (e.g. "BuilderVersion") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BuilderVersion") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *BuildType) MarshalJSON() ([]byte, error) {
type NoMethod BuildType
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Command: Command describes a step performed as part of the build
// pipeline.
type Command struct {
// Args: Command-line arguments used when executing this Command.
Args []string `json:"args,omitempty"`
// Dir: Working directory (relative to project source root) used when
// running this Command.
Dir string `json:"dir,omitempty"`
// Env: Environment variables set before running this Command.
Env []string `json:"env,omitempty"`
// Id: Optional unique identifier for this Command, used in wait_for to
// reference this Command as a dependency.
Id string `json:"id,omitempty"`
// Name: Name of the command, as presented on the command line, or if
// the command is packaged as a Docker container, as presented to
// `docker pull`.
Name string `json:"name,omitempty"`
// WaitFor: The ID(s) of the Command(s) that this Command depends on.
WaitFor []string `json:"waitFor,omitempty"`
// ForceSendFields is a list of field names (e.g. "Args") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Args") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Command) MarshalJSON() ([]byte, error) {
type NoMethod Command
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
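
// exampleCommandForceSend is an editorial sketch and not part of the
// generated API surface. It shows how ForceSendFields interacts with
// MarshalJSON: marking "Dir" forces the empty working directory to be
// serialized instead of being omitted. The image name and arguments are
// illustrative placeholders.
func exampleCommandForceSend() ([]byte, error) {
	c := &Command{
		Name: "gcr.io/cloud-builders/docker",
		Args: []string{"build", "-t", "gcr.io/example-project/example-image", "."},
		// Dir is deliberately left empty; without ForceSendFields the empty
		// value would be dropped from the request body.
		Dir:             "",
		ForceSendFields: []string{"Dir"},
	}
	return c.MarshalJSON()
}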
// CreateOperationRequest: Request for creating an operation
type CreateOperationRequest struct {
// Operation: The operation to create.
Operation *Operation `json:"operation,omitempty"`
// OperationId: The ID to use for this operation.
OperationId string `json:"operationId,omitempty"`
// ForceSendFields is a list of field names (e.g. "Operation") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Operation") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *CreateOperationRequest) MarshalJSON() ([]byte, error) {
type NoMethod CreateOperationRequest
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Deployable: An artifact that can be deployed in some runtime.
type Deployable struct {
// ResourceUri: Resource URI for the artifact being deployed.
ResourceUri []string `json:"resourceUri,omitempty"`
// ForceSendFields is a list of field names (e.g. "ResourceUri") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "ResourceUri") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Deployable) MarshalJSON() ([]byte, error) {
type NoMethod Deployable
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Deployment: The period during which some deployable was active in a
// runtime.
type Deployment struct {
// Address: Address of the runtime element hosting this deployment.
Address string `json:"address,omitempty"`
// Config: Configuration used to create this deployment.
Config string `json:"config,omitempty"`
// DeployTime: Beginning of the lifetime of this deployment.
DeployTime string `json:"deployTime,omitempty"`
// Platform: Platform hosting this deployment.
//
// Possible values:
// "PLATFORM_UNSPECIFIED" - Unknown
// "GKE" - Google Container Engine
// "FLEX" - Google App Engine: Flexible Environment
// "CUSTOM" - Custom user-defined platform
Platform string `json:"platform,omitempty"`
// ResourceUri: Output only. Resource URI for the artifact being
// deployed, taken from the deployable field with the same name.
ResourceUri []string `json:"resourceUri,omitempty"`
// UndeployTime: End of the lifetime of this deployment.
UndeployTime string `json:"undeployTime,omitempty"`
// UserEmail: Identity of the user that triggered this deployment.
UserEmail string `json:"userEmail,omitempty"`
// ForceSendFields is a list of field names (e.g. "Address") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Address") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Deployment) MarshalJSON() ([]byte, error) {
type NoMethod Deployment
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
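
// exampleDeploymentNullFields is an editorial sketch and not part of the
// generated API surface. It shows how NullFields causes MarshalJSON to emit
// an explicit JSON null for "UndeployTime" (for example, to clear it in a
// patch-style request) while other empty fields stay omitted. The address is
// an illustrative placeholder.
func exampleDeploymentNullFields() ([]byte, error) {
	d := &Deployment{
		Address:    "us-central1/example-cluster/default/example-workload",
		NullFields: []string{"UndeployTime"},
	}
	return d.MarshalJSON()
}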
// Derived: Derived describes the derived image portion (Occurrence) of
// the DockerImage relationship. This image would be produced from a
// Dockerfile whose FROM references the base image in the attached Note.
type Derived struct {
// BaseResourceUrl: Output only. This contains the base image URL for
// the derived image occurrence.
BaseResourceUrl string `json:"baseResourceUrl,omitempty"`
// Distance: Output only. The number of layers by which this image
// differs from the associated image basis.
Distance int64 `json:"distance,omitempty"`
// Fingerprint: The fingerprint of the derived image.
Fingerprint *Fingerprint `json:"fingerprint,omitempty"`
// LayerInfo: This contains layer-specific metadata; if populated, it has
// length "distance" and is ordered with [distance] being the layer
// immediately following the base image and [1] being the final layer.
LayerInfo []*Layer `json:"layerInfo,omitempty"`
// ForceSendFields is a list of field names (e.g. "BaseResourceUrl") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BaseResourceUrl") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *Derived) MarshalJSON() ([]byte, error) {
type NoMethod Derived
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Detail: Identifies all occurrences of this vulnerability in the
// package for a specific distro/location For example: glibc in
// cpe:/o:debian:debian_linux:8 for versions 2.1 - 2.2
type Detail struct {
// CpeUri: The cpe_uri in [cpe format]
// (https://cpe.mitre.org/specification/) in which the vulnerability
// manifests. Examples include distro or storage location for vulnerable
// jar. This field can be used as a filter in list requests.
CpeUri string `json:"cpeUri,omitempty"`
// Description: A vendor-specific description of this note.
Description string `json:"description,omitempty"`
// FixedLocation: The fix for this specific package version.
FixedLocation *VulnerabilityLocation `json:"fixedLocation,omitempty"`
// IsObsolete: Whether this Detail is obsolete. Occurrences are expected
// not to point to obsolete details.
IsObsolete bool `json:"isObsolete,omitempty"`
// MaxAffectedVersion: The max version of the package in which the
// vulnerability exists.
MaxAffectedVersion *Version `json:"maxAffectedVersion,omitempty"`
// MinAffectedVersion: The min version of the package in which the
// vulnerability exists.
MinAffectedVersion *Version `json:"minAffectedVersion,omitempty"`
// Package: The name of the package where the vulnerability was found.
// This field can be used as a filter in list requests.
Package string `json:"package,omitempty"`
// PackageType: The type of package; whether native or non-native (Ruby
// gems, Node.js packages, etc.).
PackageType string `json:"packageType,omitempty"`
// SeverityName: The severity (e.g. distro-assigned severity) for this
// vulnerability.
SeverityName string `json:"severityName,omitempty"`
// ForceSendFields is a list of field names (e.g. "CpeUri") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CpeUri") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Detail) MarshalJSON() ([]byte, error) {
type NoMethod Detail
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Discovered: Provides information about the scan status of a
// discovered resource.
type Discovered struct {
// AnalysisStatus: The status of discovery for the resource.
//
// Possible values:
// "ANALYSIS_STATUS_UNSPECIFIED" - Unknown
// "PENDING" - Resource is known but no action has been taken yet.
// "SCANNING" - Resource is being analyzed.
// "FINISHED_SUCCESS" - Analysis has finished successfully.
// "FINISHED_FAILED" - Analysis has finished unsuccessfully, the
// analysis itself is in a bad state.
// "FINISHED_UNSUPPORTED" - The resource is known not to be supported.
AnalysisStatus string `json:"analysisStatus,omitempty"`
// AnalysisStatusError: When an error is encountered, this will contain a
// LocalizedMessage under details to show to the user. The
// LocalizedMessage is output only and populated by the API.
AnalysisStatusError *Status `json:"analysisStatusError,omitempty"`
// ContinuousAnalysis: Whether the resource is continuously analyzed.
//
// Possible values:
// "CONTINUOUS_ANALYSIS_UNSPECIFIED" - Unknown
// "ACTIVE" - The resource is continuously analyzed.
// "INACTIVE" - The resource is ignored for continuous analysis.
ContinuousAnalysis string `json:"continuousAnalysis,omitempty"`
// Cpe: The CPE of the resource being scanned.
Cpe string `json:"cpe,omitempty"`
// Operation: Output only. An operation that indicates the status of the
// current scan. This field is deprecated, do not use.
Operation *Operation `json:"operation,omitempty"`
// ForceSendFields is a list of field names (e.g. "AnalysisStatus") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AnalysisStatus") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *Discovered) MarshalJSON() ([]byte, error) {
type NoMethod Discovered
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
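
// discoveredIsTerminal is an editorial sketch and not part of the generated
// API surface. It shows how a caller might interpret the AnalysisStatus
// values documented above to decide whether analysis has reached a terminal
// state.
func discoveredIsTerminal(d *Discovered) bool {
	if d == nil {
		return false
	}
	switch d.AnalysisStatus {
	case "FINISHED_SUCCESS", "FINISHED_FAILED", "FINISHED_UNSUPPORTED":
		return true
	default:
		// PENDING, SCANNING, or ANALYSIS_STATUS_UNSPECIFIED: analysis is
		// still in progress or its state is unknown.
		return false
	}
}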
// Discovery: A note that indicates a type of analysis a provider would
// perform. This note exists in a provider's project. A `Discovery`
// occurrence is created in a consumer's project at the start of
// analysis. The occurrence's operation will indicate the status of the
// analysis. Absence of an occurrence linked to this note for a resource
// indicates that analysis hasn't started.
type Discovery struct {
// AnalysisKind: The kind of analysis that is handled by this discovery.
//
// Possible values:
// "KIND_UNSPECIFIED" - Unknown
// "PACKAGE_VULNERABILITY" - The note and occurrence represent a
// package vulnerability.
// "BUILD_DETAILS" - The note and occurrence assert build provenance.
// "IMAGE_BASIS" - This represents an image basis relationship.
// "PACKAGE_MANAGER" - This represents a package installed via a
// package manager.
// "DEPLOYABLE" - The note and occurrence track deployment events.
// "DISCOVERY" - The note and occurrence track the initial discovery
// status of a resource.
// "ATTESTATION_AUTHORITY" - This represents a logical "role" that can
// attest to artifacts.
// "UPGRADE" - This represents an available software upgrade.
AnalysisKind string `json:"analysisKind,omitempty"`
// ForceSendFields is a list of field names (e.g. "AnalysisKind") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AnalysisKind") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Discovery) MarshalJSON() ([]byte, error) {
type NoMethod Discovery
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Distribution: This represents a particular channel of distribution
// for a given package. e.g. Debian's jessie-backports dpkg mirror
type Distribution struct {
// Architecture: The CPU architecture for which packages in this
// distribution channel were built
//
// Possible values:
// "ARCHITECTURE_UNSPECIFIED" - Unknown architecture
// "X86" - X86 architecture
// "X64" - X64 architecture
Architecture string `json:"architecture,omitempty"`
// CpeUri: The cpe_uri in [cpe
// format](https://cpe.mitre.org/specification/) denoting the package
// manager version distributing a package.
CpeUri string `json:"cpeUri,omitempty"`
// Description: The distribution channel-specific description of this
// package.
Description string `json:"description,omitempty"`
// LatestVersion: The latest available version of this package in this
// distribution channel.
LatestVersion *Version `json:"latestVersion,omitempty"`
// Maintainer: A freeform string denoting the maintainer of this
// package.
Maintainer string `json:"maintainer,omitempty"`
// Url: The distribution channel-specific homepage for this package.
Url string `json:"url,omitempty"`
// ForceSendFields is a list of field names (e.g. "Architecture") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Architecture") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Distribution) MarshalJSON() ([]byte, error) {
type NoMethod Distribution
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Empty: A generic empty message that you can re-use to avoid defining
// duplicated empty messages in your APIs. A typical example is to use
// it as the request or the response type of an API method. For
// instance: service Foo { rpc Bar(google.protobuf.Empty) returns
// (google.protobuf.Empty); } The JSON representation for `Empty` is
// an empty JSON object `{}`.
type Empty struct {
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
}
// Expr: Represents a textual expression in the Common Expression
// Language (CEL) syntax. CEL is a C-like expression language. The
// syntax and semantics of CEL are documented at
// https://github.com/google/cel-spec.
//
// Example (Comparison):
//   title: "Summary size limit"
//   description: "Determines if a summary is less than 100 chars"
//   expression: "document.summary.size() < 100"
//
// Example (Equality):
//   title: "Requestor is owner"
//   description: "Determines if requestor is the document owner"
//   expression: "document.owner == request.auth.claims.email"
//
// Example (Logic):
//   title: "Public documents"
//   description: "Determine whether the document should be publicly visible"
//   expression: "document.type != 'private' && document.type != 'internal'"
//
// Example (Data Manipulation):
//   title: "Notification string"
//   description: "Create a notification string with a timestamp."
//   expression: "'New message received at ' + string(document.create_time)"
//
// The exact variables and functions that may be referenced within an
// expression are determined by the service that evaluates it. See the
// service documentation for additional information.
type Expr struct {
// Description: Optional. Description of the expression. This is a
// longer text which describes the expression, e.g. when hovered over it
// in a UI.
Description string `json:"description,omitempty"`
// Expression: Textual representation of an expression in Common
// Expression Language syntax.
Expression string `json:"expression,omitempty"`
// Location: Optional. String indicating the location of the expression
// for error reporting, e.g. a file name and a position in the file.
Location string `json:"location,omitempty"`
// Title: Optional. Title for the expression, i.e. a short string
// describing its purpose. This can be used e.g. in UIs which allow to
// enter the expression.
Title string `json:"title,omitempty"`
// ForceSendFields is a list of field names (e.g. "Description") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Description") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Expr) MarshalJSON() ([]byte, error) {
type NoMethod Expr
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// FileHashes: Container message for hashes of byte content of files,
// used in Source messages to verify integrity of source input to the
// build.
type FileHashes struct {
// FileHash: Collection of file hashes.
FileHash []*Hash `json:"fileHash,omitempty"`
// ForceSendFields is a list of field names (e.g. "FileHash") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "FileHash") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *FileHashes) MarshalJSON() ([]byte, error) {
type NoMethod FileHashes
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
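
// exampleFileHashes is an editorial sketch and not part of the generated API
// surface. It wraps a single Hash in a FileHashes container; the hash type
// and value shown here are illustrative placeholders.
func exampleFileHashes() *FileHashes {
	return &FileHashes{
		FileHash: []*Hash{{Type: "SHA256", Value: "c2hhMjU2LWRpZ2VzdC1wbGFjZWhvbGRlcg=="}},
	}
}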
// Fingerprint: A set of properties that uniquely identify a given
// Docker image.
type Fingerprint struct {
// V1Name: The layer-id of the final layer in the Docker image's v1
// representation. This field can be used as a filter in list requests.
V1Name string `json:"v1Name,omitempty"`
// V2Blob: The ordered list of v2 blobs that represent a given image.
V2Blob []string `json:"v2Blob,omitempty"`
// V2Name: Output only. The name of the image's v2 blobs, computed via:
//   [bottom] := v2_blob[bottom]
//   [N] := sha256(v2_blob[N] + " " + v2_name[N+1])
// Only the name of the final blob is kept. This field can be used as a
// filter in list requests.
V2Name string `json:"v2Name,omitempty"`
// ForceSendFields is a list of field names (e.g. "V1Name") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "V1Name") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Fingerprint) MarshalJSON() ([]byte, error) {
type NoMethod Fingerprint
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
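
// chainV2Names is an editorial sketch and not part of the generated API
// surface. It illustrates the recurrence documented on Fingerprint.V2Name:
//
//	v2_name[bottom] = v2_blob[bottom]
//	v2_name[N]      = sha256(v2_blob[N] + " " + v2_name[N+1])
//
// The hash is injected as a parameter so the sketch needs no extra imports;
// a caller would pass a hex-encoded SHA-256. Treating the last element of
// v2Blob as the bottom (base) blob is an assumption of this sketch.
func chainV2Names(v2Blob []string, sha256Hex func(string) string) string {
	if len(v2Blob) == 0 {
		return ""
	}
	// Base case: the bottom blob's name is the blob digest itself.
	name := v2Blob[len(v2Blob)-1]
	// Chain upward; only the name computed for the final blob is kept.
	for i := len(v2Blob) - 2; i >= 0; i-- {
		name = sha256Hex(v2Blob[i] + " " + name)
	}
	return name
}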
// GetIamPolicyRequest: Request message for `GetIamPolicy` method.
type GetIamPolicyRequest struct {
// Options: OPTIONAL: A `GetPolicyOptions` object for specifying options
// to `GetIamPolicy`.
Options *GetPolicyOptions `json:"options,omitempty"`
// ForceSendFields is a list of field names (e.g. "Options") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Options") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GetIamPolicyRequest) MarshalJSON() ([]byte, error) {
type NoMethod GetIamPolicyRequest
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// GetPolicyOptions: Encapsulates settings provided to GetIamPolicy.
type GetPolicyOptions struct {
// RequestedPolicyVersion: Optional. The policy format version to be
// returned. Valid values are 0, 1, and 3. Requests specifying an
// invalid value will be rejected. Requests for policies with any
// conditional bindings must specify version 3. Policies without any
// conditional bindings may specify any valid value or leave the field
// unset. To learn which resources support conditions in their IAM
// policies, see the [IAM
// documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
RequestedPolicyVersion int64 `json:"requestedPolicyVersion,omitempty"`
// ForceSendFields is a list of field names (e.g.
// "RequestedPolicyVersion") to unconditionally include in API requests.
// By default, fields with empty values are omitted from API requests.
// However, any non-pointer, non-interface field appearing in
// ForceSendFields will be sent to the server regardless of whether the
// field is empty or not. This may be used to include empty fields in
// Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "RequestedPolicyVersion")
// to include in API requests with the JSON null value. By default,
// fields with empty values are omitted from API requests. However, any
// field with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *GetPolicyOptions) MarshalJSON() ([]byte, error) {
type NoMethod GetPolicyOptions
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
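
// exampleGetIamPolicyV3 is an editorial sketch and not part of the generated
// API surface. It builds a GetIamPolicyRequest asking for policy format
// version 3, which the field documentation above says is required when the
// policy contains conditional bindings.
func exampleGetIamPolicyV3() *GetIamPolicyRequest {
	return &GetIamPolicyRequest{
		Options: &GetPolicyOptions{RequestedPolicyVersion: 3},
	}
}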
// GetVulnzOccurrencesSummaryResponse: A summary of how many vulnerability
// occurrences there are per severity type.
type GetVulnzOccurrencesSummaryResponse struct {
// Counts: A map of how many occurrences were found for each severity.
Counts []*SeverityCount `json:"counts,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Counts") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Counts") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GetVulnzOccurrencesSummaryResponse) MarshalJSON() ([]byte, error) {
type NoMethod GetVulnzOccurrencesSummaryResponse
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// GoogleDevtoolsContaineranalysisV1alpha1AliasContext: An alias to a
// repo revision.
type GoogleDevtoolsContaineranalysisV1alpha1AliasContext struct {
// Kind: The alias kind.
//
// Possible values:
// "KIND_UNSPECIFIED" - Unknown.
// "FIXED" - Git tag.
// "MOVABLE" - Git branch.
// "OTHER" - Used to specify non-standard aliases. For example, if a
// Git repo has a ref named "refs/foo/bar".
Kind string `json:"kind,omitempty"`
// Name: The alias name.
Name string `json:"name,omitempty"`
// ForceSendFields is a list of field names (e.g. "Kind") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Kind") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GoogleDevtoolsContaineranalysisV1alpha1AliasContext) MarshalJSON() ([]byte, error) {
type NoMethod GoogleDevtoolsContaineranalysisV1alpha1AliasContext
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// GoogleDevtoolsContaineranalysisV1alpha1CloudRepoSourceContext: A
// CloudRepoSourceContext denotes a particular revision in a Google
// Cloud Source Repo.
type GoogleDevtoolsContaineranalysisV1alpha1CloudRepoSourceContext struct {
// AliasContext: An alias, which may be a branch or tag.
AliasContext *GoogleDevtoolsContaineranalysisV1alpha1AliasContext `json:"aliasContext,omitempty"`
// RepoId: The ID of the repo.
RepoId *GoogleDevtoolsContaineranalysisV1alpha1RepoId `json:"repoId,omitempty"`
// RevisionId: A revision ID.
RevisionId string `json:"revisionId,omitempty"`
// ForceSendFields is a list of field names (e.g. "AliasContext") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AliasContext") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GoogleDevtoolsContaineranalysisV1alpha1CloudRepoSourceContext) MarshalJSON() ([]byte, error) {
type NoMethod GoogleDevtoolsContaineranalysisV1alpha1CloudRepoSourceContext
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// GoogleDevtoolsContaineranalysisV1alpha1GerritSourceContext: A
// SourceContext referring to a Gerrit project.
type GoogleDevtoolsContaineranalysisV1alpha1GerritSourceContext struct {
// AliasContext: An alias, which may be a branch or tag.
AliasContext *GoogleDevtoolsContaineranalysisV1alpha1AliasContext `json:"aliasContext,omitempty"`
// GerritProject: The full project name within the host. Projects may be
// nested, so "project/subproject" is a valid project name. The "repo
// name" is the hostURI/project.
GerritProject string `json:"gerritProject,omitempty"`
// HostUri: The URI of a running Gerrit instance.
HostUri string `json:"hostUri,omitempty"`
// RevisionId: A revision (commit) ID.
RevisionId string `json:"revisionId,omitempty"`
// ForceSendFields is a list of field names (e.g. "AliasContext") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AliasContext") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GoogleDevtoolsContaineranalysisV1alpha1GerritSourceContext) MarshalJSON() ([]byte, error) {
type NoMethod GoogleDevtoolsContaineranalysisV1alpha1GerritSourceContext
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// GoogleDevtoolsContaineranalysisV1alpha1GitSourceContext: A
// GitSourceContext denotes a particular revision in a third party Git
// repository (e.g., GitHub).
type GoogleDevtoolsContaineranalysisV1alpha1GitSourceContext struct {
// RevisionId: Required. Git commit hash.
RevisionId string `json:"revisionId,omitempty"`
// Url: Git repository URL.
Url string `json:"url,omitempty"`
// ForceSendFields is a list of field names (e.g. "RevisionId") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "RevisionId") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GoogleDevtoolsContaineranalysisV1alpha1GitSourceContext) MarshalJSON() ([]byte, error) {
type NoMethod GoogleDevtoolsContaineranalysisV1alpha1GitSourceContext
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// GoogleDevtoolsContaineranalysisV1alpha1OperationMetadata: Metadata
// for all operations used and required for all operations created by
// Container Analysis Providers.
type GoogleDevtoolsContaineranalysisV1alpha1OperationMetadata struct {
// CreateTime: Output only. The time this operation was created.
CreateTime string `json:"createTime,omitempty"`
// EndTime: Output only. The time that this operation was marked
// completed or failed.
EndTime string `json:"endTime,omitempty"`
// ForceSendFields is a list of field names (e.g. "CreateTime") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CreateTime") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GoogleDevtoolsContaineranalysisV1alpha1OperationMetadata) MarshalJSON() ([]byte, error) {
type NoMethod GoogleDevtoolsContaineranalysisV1alpha1OperationMetadata
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// GoogleDevtoolsContaineranalysisV1alpha1ProjectRepoId: Selects a repo
// using a Google Cloud Platform project ID (e.g., winged-cargo-31) and
// a repo name within that project.
type GoogleDevtoolsContaineranalysisV1alpha1ProjectRepoId struct {
// ProjectId: The ID of the project.
ProjectId string `json:"projectId,omitempty"`
// RepoName: The name of the repo. Leave empty for the default repo.
RepoName string `json:"repoName,omitempty"`
// ForceSendFields is a list of field names (e.g. "ProjectId") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "ProjectId") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GoogleDevtoolsContaineranalysisV1alpha1ProjectRepoId) MarshalJSON() ([]byte, error) {
type NoMethod GoogleDevtoolsContaineranalysisV1alpha1ProjectRepoId
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// GoogleDevtoolsContaineranalysisV1alpha1RepoId: A unique identifier
// for a Cloud Repo.
type GoogleDevtoolsContaineranalysisV1alpha1RepoId struct {
// ProjectRepoId: A combination of a project ID and a repo name.
ProjectRepoId *GoogleDevtoolsContaineranalysisV1alpha1ProjectRepoId `json:"projectRepoId,omitempty"`
// Uid: A server-assigned, globally unique identifier.
Uid string `json:"uid,omitempty"`
// ForceSendFields is a list of field names (e.g. "ProjectRepoId") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "ProjectRepoId") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GoogleDevtoolsContaineranalysisV1alpha1RepoId) MarshalJSON() ([]byte, error) {
type NoMethod GoogleDevtoolsContaineranalysisV1alpha1RepoId
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// GoogleDevtoolsContaineranalysisV1alpha1SourceContext: A SourceContext
// is a reference to a tree of files. A SourceContext together with a
// path point to a unique revision of a single file or directory.
type GoogleDevtoolsContaineranalysisV1alpha1SourceContext struct {
// CloudRepo: A SourceContext referring to a revision in a Google Cloud
// Source Repo.
CloudRepo *GoogleDevtoolsContaineranalysisV1alpha1CloudRepoSourceContext `json:"cloudRepo,omitempty"`
// Gerrit: A SourceContext referring to a Gerrit project.
Gerrit *GoogleDevtoolsContaineranalysisV1alpha1GerritSourceContext `json:"gerrit,omitempty"`
// Git: A SourceContext referring to any third party Git repo (e.g.,
// GitHub).
Git *GoogleDevtoolsContaineranalysisV1alpha1GitSourceContext `json:"git,omitempty"`
// Labels: Labels with user defined metadata.
Labels map[string]string `json:"labels,omitempty"`
// ForceSendFields is a list of field names (e.g. "CloudRepo") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CloudRepo") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *GoogleDevtoolsContaineranalysisV1alpha1SourceContext) MarshalJSON() ([]byte, error) {
type NoMethod GoogleDevtoolsContaineranalysisV1alpha1SourceContext
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
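
// exampleGitSourceContext is an editorial sketch and not part of the
// generated API surface. It populates a SourceContext with its Git variant;
// the repository URL, commit hash, and label are illustrative placeholders.
func exampleGitSourceContext() *GoogleDevtoolsContaineranalysisV1alpha1SourceContext {
	return &GoogleDevtoolsContaineranalysisV1alpha1SourceContext{
		Git: &GoogleDevtoolsContaineranalysisV1alpha1GitSourceContext{
			Url:        "https://github.com/example/example-repo",
			RevisionId: "0123456789abcdef0123456789abcdef01234567",
		},
		Labels: map[string]string{"environment": "example"},
	}
}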
// Hash: Container message for hash values.
type Hash struct {
// Type: The type of hash that was performed.
Type string `json:"type,omitempty"`
// Value: The hash value.
Value string `json:"value,omitempty"`
// ForceSendFields is a list of field names (e.g. "Type") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Type") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Hash) MarshalJSON() ([]byte, error) {
type NoMethod Hash
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Installation: This represents how a particular software package may
// be installed on a system.
type Installation struct {
// Location: All of the places within the filesystem versions of this
// package have been found.
Location []*Location `json:"location,omitempty"`
// Name: Output only. The name of the installed package.
Name string `json:"name,omitempty"`
// ForceSendFields is a list of field names (e.g. "Location") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Location") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Installation) MarshalJSON() ([]byte, error) {
type NoMethod Installation
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Layer: Layer holds metadata specific to a layer of a Docker image.
type Layer struct {
// Arguments: The recovered arguments to the Dockerfile directive.
Arguments string `json:"arguments,omitempty"`
// Directive: The recovered Dockerfile directive used to construct this
// layer.
//
// Possible values:
// "DIRECTIVE_UNSPECIFIED" - Default value for unsupported/missing
// directive
// "MAINTAINER" - https://docs.docker.com/engine/reference/builder/
// "RUN" - https://docs.docker.com/engine/reference/builder/
// "CMD" - https://docs.docker.com/engine/reference/builder/
// "LABEL" - https://docs.docker.com/engine/reference/builder/
// "EXPOSE" - https://docs.docker.com/engine/reference/builder/
// "ENV" - https://docs.docker.com/engine/reference/builder/
// "ADD" - https://docs.docker.com/engine/reference/builder/
// "COPY" - https://docs.docker.com/reference/builder/#copy
// "ENTRYPOINT" - https://docs.docker.com/engine/reference/builder/
// "VOLUME" - https://docs.docker.com/engine/reference/builder/
// "USER" - https://docs.docker.com/engine/reference/builder/
// "WORKDIR" - https://docs.docker.com/engine/reference/builder/
// "ARG" - https://docs.docker.com/engine/reference/builder/
// "ONBUILD" - https://docs.docker.com/engine/reference/builder/
// "STOPSIGNAL" - https://docs.docker.com/engine/reference/builder/
// "HEALTHCHECK" - https://docs.docker.com/engine/reference/builder/
// "SHELL" - https://docs.docker.com/engine/reference/builder/
Directive string `json:"directive,omitempty"`
// ForceSendFields is a list of field names (e.g. "Arguments") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Arguments") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Layer) MarshalJSON() ([]byte, error) {
type NoMethod Layer
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
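
// exampleRunLayer is an editorial sketch and not part of the generated API
// surface. It builds a Layer describing a recovered RUN directive; the
// argument string is an illustrative placeholder.
func exampleRunLayer() *Layer {
	return &Layer{
		Directive: "RUN",
		Arguments: "apt-get update && apt-get install -y ca-certificates",
	}
}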
// ListNoteOccurrencesResponse: Response including listed occurrences
// for a note.
type ListNoteOccurrencesResponse struct {
// NextPageToken: Token to receive the next page of occurrences.
NextPageToken string `json:"nextPageToken,omitempty"`
// Occurrences: The occurrences attached to the specified note.
Occurrences []*Occurrence `json:"occurrences,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "NextPageToken") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "NextPageToken") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *ListNoteOccurrencesResponse) MarshalJSON() ([]byte, error) {
type NoMethod ListNoteOccurrencesResponse
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// ListNotesResponse: Response including listed notes.
type ListNotesResponse struct {
// NextPageToken: The next pagination token in the list response. It
// should be used as page_token for the following request. An empty
// value means no more results.
NextPageToken string `json:"nextPageToken,omitempty"`
// Notes: The notes requested.
Notes []*Note `json:"notes,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "NextPageToken") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "NextPageToken") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *ListNotesResponse) MarshalJSON() ([]byte, error) {
type NoMethod ListNotesResponse
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// ListOccurrencesResponse: Response including listed active
// occurrences.
type ListOccurrencesResponse struct {
// NextPageToken: The next pagination token in the list response. It
// should be used as `page_token` for the following request. An empty
// value means no more results.
NextPageToken string `json:"nextPageToken,omitempty"`
// Occurrences: The occurrences requested.
Occurrences []*Occurrence `json:"occurrences,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "NextPageToken") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "NextPageToken") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *ListOccurrencesResponse) MarshalJSON() ([]byte, error) {
type NoMethod ListOccurrencesResponse
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
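
// collectAllOccurrences is an editorial sketch and not part of the generated
// API surface. It shows the intended NextPageToken loop over
// ListOccurrencesResponse pages; the page-fetching function is injected so
// the sketch stays self-contained, and a real caller would wrap the generated
// list call, setting its page token from the previous response.
func collectAllOccurrences(fetch func(pageToken string) (*ListOccurrencesResponse, error)) ([]*Occurrence, error) {
	var all []*Occurrence
	token := ""
	for {
		resp, err := fetch(token)
		if err != nil {
			return nil, err
		}
		all = append(all, resp.Occurrences...)
		// An empty NextPageToken means there are no more results.
		if resp.NextPageToken == "" {
			return all, nil
		}
		token = resp.NextPageToken
	}
}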
// ListScanConfigsResponse: A list of scan configs for the project.
type ListScanConfigsResponse struct {
// NextPageToken: A page token to pass in order to get more scan
// configs.
NextPageToken string `json:"nextPageToken,omitempty"`
// ScanConfigs: The set of scan configs.
ScanConfigs []*ScanConfig `json:"scanConfigs,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "NextPageToken") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "NextPageToken") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *ListScanConfigsResponse) MarshalJSON() ([]byte, error) {
type NoMethod ListScanConfigsResponse
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Location: An occurrence of a particular package installation found
// within a system's filesystem. e.g. glibc was found in
// /var/lib/dpkg/status
type Location struct {
// CpeUri: The cpe_uri in [cpe
// format](https://cpe.mitre.org/specification/) denoting the package
// manager version distributing a package.
CpeUri string `json:"cpeUri,omitempty"`
// Path: The path from which we gathered that this package/version is
// installed.
Path string `json:"path,omitempty"`
// Version: The version installed at this location.
Version *Version `json:"version,omitempty"`
// ForceSendFields is a list of field names (e.g. "CpeUri") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CpeUri") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Location) MarshalJSON() ([]byte, error) {
type NoMethod Location
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Note: Provides a detailed description of a `Note`.
type Note struct {
// AttestationAuthority: A note describing an attestation role.
AttestationAuthority *AttestationAuthority `json:"attestationAuthority,omitempty"`
// BaseImage: A note describing a base image.
BaseImage *Basis `json:"baseImage,omitempty"`
// BuildType: Build provenance type for a verifiable build.
BuildType *BuildType `json:"buildType,omitempty"`
// CreateTime: Output only. The time this note was created. This field
// can be used as a filter in list requests.
CreateTime string `json:"createTime,omitempty"`
// Deployable: A note describing something that can be deployed.
Deployable *Deployable `json:"deployable,omitempty"`
// Discovery: A note describing a provider/analysis type.
Discovery *Discovery `json:"discovery,omitempty"`
// ExpirationTime: Time of expiration for this note, null if note does
// not expire.
ExpirationTime string `json:"expirationTime,omitempty"`
// Kind: Output only. This explicitly denotes which kind of note is
// specified. This field can be used as a filter in list requests.
//
// Possible values:
// "KIND_UNSPECIFIED" - Unknown
// "PACKAGE_VULNERABILITY" - The note and occurrence represent a
// package vulnerability.
// "BUILD_DETAILS" - The note and occurrence assert build provenance.
// "IMAGE_BASIS" - This represents an image basis relationship.
// "PACKAGE_MANAGER" - This represents a package installed via a
// package manager.
// "DEPLOYABLE" - The note and occurrence track deployment events.
// "DISCOVERY" - The note and occurrence track the initial discovery
// status of a resource.
// "ATTESTATION_AUTHORITY" - This represents a logical "role" that can
// attest to artifacts.
// "UPGRADE" - This represents an available software upgrade.
Kind string `json:"kind,omitempty"`
// LongDescription: A detailed description of this `Note`.
LongDescription string `json:"longDescription,omitempty"`
// Name: The name of the note in the form
// "projects/{provider_project_id}/notes/{NOTE_ID}"
Name string `json:"name,omitempty"`
// Package: A note describing a package hosted by various package
// managers.
Package *Package `json:"package,omitempty"`
// RelatedUrl: URLs associated with this note
RelatedUrl []*RelatedUrl `json:"relatedUrl,omitempty"`
// ShortDescription: A one sentence description of this `Note`.
ShortDescription string `json:"shortDescription,omitempty"`
// UpdateTime: Output only. The time this note was last updated. This
// field can be used as a filter in list requests.
UpdateTime string `json:"updateTime,omitempty"`
// Upgrade: A note describing an upgrade.
Upgrade *UpgradeNote `json:"upgrade,omitempty"`
// VulnerabilityType: A package vulnerability type of note.
VulnerabilityType *VulnerabilityType `json:"vulnerabilityType,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g.
// "AttestationAuthority") to unconditionally include in API requests.
// By default, fields with empty values are omitted from API requests.
// However, any non-pointer, non-interface field appearing in
// ForceSendFields will be sent to the server regardless of whether the
// field is empty or not. This may be used to include empty fields in
// Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AttestationAuthority") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *Note) MarshalJSON() ([]byte, error) {
type NoMethod Note
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
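// Editor's note: a minimal, hedged sketch of how a caller might assemble a
// Note value before sending it with the notes.create method further below.
// The descriptions and URL are placeholders, not values defined by this API.
func exampleBuildNote() *Note {
	return &Note{
		// Kind is output only; the server derives it from whichever detail
		// field (e.g. VulnerabilityType, BuildType, AttestationAuthority) is
		// populated.
		ShortDescription: "Example package vulnerability note",
		LongDescription:  "Placeholder long description for illustration only.",
		RelatedUrl: []*RelatedUrl{
			{Label: "advisory", Url: "https://example.com/advisory"},
		},
	}
}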
// Occurrence: `Occurrence` includes information about analysis
// occurrences for an image.
type Occurrence struct {
// Attestation: Describes an attestation of an artifact.
Attestation *Attestation `json:"attestation,omitempty"`
// BuildDetails: Build details for a verifiable build.
BuildDetails *BuildDetails `json:"buildDetails,omitempty"`
// CreateTime: Output only. The time this `Occurrence` was created.
CreateTime string `json:"createTime,omitempty"`
// Deployment: Describes the deployment of an artifact on a runtime.
Deployment *Deployment `json:"deployment,omitempty"`
// DerivedImage: Describes how this resource derives from the basis in
// the associated note.
DerivedImage *Derived `json:"derivedImage,omitempty"`
// Discovered: Describes the initial scan status for this resource.
Discovered *Discovered `json:"discovered,omitempty"`
// Installation: Describes the installation of a package on the linked
// resource.
Installation *Installation `json:"installation,omitempty"`
// Kind: Output only. This explicitly denotes which of the `Occurrence`
// details are specified. This field can be used as a filter in list
// requests.
//
// Possible values:
// "KIND_UNSPECIFIED" - Unknown
// "PACKAGE_VULNERABILITY" - The note and occurrence represent a
// package vulnerability.
// "BUILD_DETAILS" - The note and occurrence assert build provenance.
// "IMAGE_BASIS" - This represents an image basis relationship.
// "PACKAGE_MANAGER" - This represents a package installed via a
// package manager.
// "DEPLOYABLE" - The note and occurrence track deployment events.
// "DISCOVERY" - The note and occurrence track the initial discovery
// status of a resource.
// "ATTESTATION_AUTHORITY" - This represents a logical "role" that can
// attest to artifacts.
// "UPGRADE" - This represents an available software upgrade.
Kind string `json:"kind,omitempty"`
// Name: Output only. The name of the `Occurrence` in the form
// "projects/{project_id}/occurrences/{OCCURRENCE_ID}"
Name string `json:"name,omitempty"`
// NoteName: An analysis note associated with this image, in the form
// "providers/{provider_id}/notes/{NOTE_ID}" This field can be used as a
// filter in list requests.
NoteName string `json:"noteName,omitempty"`
// Remediation: A description of actions that can be taken to remedy the
// `Note`
Remediation string `json:"remediation,omitempty"`
// Resource: The resource for which the `Occurrence` applies.
Resource *Resource `json:"resource,omitempty"`
// ResourceUrl: The unique URL of the image or the container for which
// the `Occurrence` applies. For example,
// https://gcr.io/project/image@sha256:foo This field can be used as a
// filter in list requests.
ResourceUrl string `json:"resourceUrl,omitempty"`
// UpdateTime: Output only. The time this `Occurrence` was last updated.
UpdateTime string `json:"updateTime,omitempty"`
// Upgrade: Describes an upgrade.
Upgrade *UpgradeOccurrence `json:"upgrade,omitempty"`
// VulnerabilityDetails: Details of a security vulnerability note.
VulnerabilityDetails *VulnerabilityDetails `json:"vulnerabilityDetails,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Attestation") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Attestation") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Occurrence) MarshalJSON() ([]byte, error) {
type NoMethod Occurrence
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Operation: This resource represents a long-running operation that is
// the result of a network API call.
type Operation struct {
// Done: If the value is `false`, it means the operation is still in
// progress. If `true`, the operation is completed, and either `error`
// or `response` is available.
Done bool `json:"done,omitempty"`
// Error: The error result of the operation in case of failure or
// cancellation.
Error *Status `json:"error,omitempty"`
// Metadata: Service-specific metadata associated with the operation. It
// typically contains progress information and common metadata such as
// create time. Some services might not provide such metadata. Any
// method that returns a long-running operation should document the
// metadata type, if any.
Metadata googleapi.RawMessage `json:"metadata,omitempty"`
// Name: The server-assigned name, which is only unique within the same
// service that originally returns it. If you use the default HTTP
// mapping, the `name` should be a resource name ending with
// `operations/{unique_id}`.
Name string `json:"name,omitempty"`
// Response: The normal response of the operation in case of success. If
// the original method returns no data on success, such as `Delete`, the
// response is `google.protobuf.Empty`. If the original method is
// standard `Get`/`Create`/`Update`, the response should be the
// resource. For other methods, the response should have the type
// `XxxResponse`, where `Xxx` is the original method name. For example,
// if the original method name is `TakeSnapshot()`, the inferred
// response type is `TakeSnapshotResponse`.
Response googleapi.RawMessage `json:"response,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Done") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Done") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Operation) MarshalJSON() ([]byte, error) {
type NoMethod Operation
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
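// Editor's note: a small sketch of how the Done/Error/Response contract
// described above is typically consumed. Mapping the Status onto a
// googleapi.Error is illustrative; the concrete response type depends on the
// method that started the operation.
func exampleInspectOperation(op *Operation) (googleapi.RawMessage, error) {
	if !op.Done {
		// Still in progress; a caller would poll the operation again later.
		return nil, nil
	}
	if op.Error != nil {
		// The operation failed or was cancelled.
		return nil, &googleapi.Error{
			Code:    int(op.Error.Code),
			Message: op.Error.Message,
		}
	}
	// Success: the raw response payload is left for the caller to decode.
	return op.Response, nil
}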
// Package: This represents a particular package that is distributed
// over various channels; e.g., glibc (aka libc6) is distributed by many
// distros, at various versions.
type Package struct {
// Distribution: The various channels by which a package is distributed.
Distribution []*Distribution `json:"distribution,omitempty"`
// Name: The name of the package.
Name string `json:"name,omitempty"`
// ForceSendFields is a list of field names (e.g. "Distribution") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Distribution") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Package) MarshalJSON() ([]byte, error) {
type NoMethod Package
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// PackageIssue: This message wraps a location affected by a
// vulnerability and its associated fix (if one is available).
type PackageIssue struct {
// AffectedLocation: The location of the vulnerability.
AffectedLocation *VulnerabilityLocation `json:"affectedLocation,omitempty"`
// FixedLocation: The location of the available fix for vulnerability.
FixedLocation *VulnerabilityLocation `json:"fixedLocation,omitempty"`
SeverityName string `json:"severityName,omitempty"`
// ForceSendFields is a list of field names (e.g. "AffectedLocation") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AffectedLocation") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *PackageIssue) MarshalJSON() ([]byte, error) {
type NoMethod PackageIssue
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// PgpSignedAttestation: An attestation wrapper with a PGP-compatible
// signature. This message only supports `ATTACHED` signatures, where
// the payload that is signed is included alongside the signature itself
// in the same file.
type PgpSignedAttestation struct {
// ContentType: Type (for example schema) of the attestation payload
// that was signed. The verifier must ensure that the provided type is
// one that the verifier supports, and that the attestation payload is a
// valid instantiation of that type (for example by validating a JSON
// schema).
//
// Possible values:
// "CONTENT_TYPE_UNSPECIFIED" - `ContentType` is not set.
// "SIMPLE_SIGNING_JSON" - Atomic format attestation signature. See
// https://github.com/containers/image/blob/8a5d2f82a6e3263290c8e0276c3e0f64e77723e7/docs/atomic-signature.md The payload extracted from `signature` is a JSON blob conforming to the linked
// schema.
ContentType string `json:"contentType,omitempty"`
// PgpKeyId: The cryptographic fingerprint of the key used to generate
// the signature, as output by, e.g. `gpg --list-keys`. This should be
// the version 4, full 160-bit fingerprint, expressed as a 40 character
// hexadecimal string. See
// https://tools.ietf.org/html/rfc4880#section-12.2 for details.
// Implementations may choose to acknowledge "LONG", "SHORT", or other
// abbreviated key IDs, but only the full fingerprint is guaranteed to
// work. In gpg, the full fingerprint can be retrieved from the `fpr`
// field returned when calling --list-keys with --with-colons. For
// example: ``` gpg --with-colons --with-fingerprint --force-v4-certs \
// --list-keys [email protected] tru::1:1513631572:0:3:1:5 pub:......
// fpr:::::::::24FF6481B76AC91E66A00AC657A93A81EF3AE6FB: ``` Above, the
// fingerprint is `24FF6481B76AC91E66A00AC657A93A81EF3AE6FB`.
PgpKeyId string `json:"pgpKeyId,omitempty"`
// Signature: The raw content of the signature, as output by GNU Privacy
// Guard (GPG) or equivalent. Since this message only supports attached
// signatures, the payload that was signed must be attached. While the
// signature format supported is dependent on the verification
// implementation, currently only ASCII-armored (`--armor` to gpg),
// non-clearsigned (`--sign` rather than `--clearsign` to gpg) are
// supported. Concretely, `gpg --sign --armor --output=signature.gpg
// payload.json` will create the signature content expected in this
// field in `signature.gpg` for the `payload.json` attestation payload.
Signature string `json:"signature,omitempty"`
// ForceSendFields is a list of field names (e.g. "ContentType") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "ContentType") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *PgpSignedAttestation) MarshalJSON() ([]byte, error) {
type NoMethod PgpSignedAttestation
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
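// Editor's note: a hedged sketch of populating a PgpSignedAttestation from an
// armored, attached signature produced roughly as described in the comments
// above (e.g. `gpg --sign --armor --output=signature.gpg payload.json`). The
// fingerprint is the sample value from the field documentation; the signature
// contents are supplied by the caller.
func examplePgpSignedAttestation(armoredSignature string) *PgpSignedAttestation {
	return &PgpSignedAttestation{
		ContentType: "SIMPLE_SIGNING_JSON",
		// Full 40-character, version 4 fingerprint of the signing key.
		PgpKeyId:  "24FF6481B76AC91E66A00AC657A93A81EF3AE6FB",
		Signature: armoredSignature,
	}
}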
// Policy: An Identity and Access Management (IAM) policy, which
// specifies access controls for Google Cloud resources. A `Policy` is a
// collection of `bindings`. A `binding` binds one or more `members` to
// a single `role`. Members can be user accounts, service accounts,
// Google groups, and domains (such as G Suite). A `role` is a named
// list of permissions; each `role` can be an IAM predefined role or a
// user-created custom role. For some types of Google Cloud resources, a
// `binding` can also specify a `condition`, which is a logical
// expression that allows access to a resource only if the expression
// evaluates to `true`. A condition can add constraints based on
// attributes of the request, the resource, or both. To learn which
// resources support conditions in their IAM policies, see the [IAM
// documentation](https://cloud.google.com/iam/help/conditions/resource-p
// olicies). **JSON example:** { "bindings": [ { "role":
// "roles/resourcemanager.organizationAdmin", "members": [
// "user:[email protected]", "group:[email protected]",
// "domain:google.com",
// "serviceAccount:[email protected]" ] }, {
// "role": "roles/resourcemanager.organizationViewer", "members": [
// "user:[email protected]" ], "condition": { "title": "expirable access",
// "description": "Does not grant access after Sep 2020", "expression":
// "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ],
// "etag": "BwWWja0YfJA=", "version": 3 } **YAML example:** bindings: -
// members: - user:[email protected] - group:[email protected] -
// domain:google.com -
// serviceAccount:[email protected] role:
// roles/resourcemanager.organizationAdmin - members: -
// user:[email protected] role: roles/resourcemanager.organizationViewer
// condition: title: expirable access description: Does not grant access
// after Sep 2020 expression: request.time <
// timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version:
// 3 For a description of IAM and its features, see the [IAM
// documentation](https://cloud.google.com/iam/docs/).
type Policy struct {
// Bindings: Associates a list of `members` to a `role`. Optionally, may
// specify a `condition` that determines how and when the `bindings` are
// applied. Each of the `bindings` must contain at least one member.
Bindings []*Binding `json:"bindings,omitempty"`
// Etag: `etag` is used for optimistic concurrency control as a way to
// help prevent simultaneous updates of a policy from overwriting each
// other. It is strongly suggested that systems make use of the `etag`
// in the read-modify-write cycle to perform policy updates in order to
// avoid race conditions: An `etag` is returned in the response to
// `getIamPolicy`, and systems are expected to put that etag in the
// request to `setIamPolicy` to ensure that their change will be applied
// to the same version of the policy. **Important:** If you use IAM
// Conditions, you must include the `etag` field whenever you call
// `setIamPolicy`. If you omit this field, then IAM allows you to
// overwrite a version `3` policy with a version `1` policy, and all of
// the conditions in the version `3` policy are lost.
Etag string `json:"etag,omitempty"`
// Version: Specifies the format of the policy. Valid values are `0`,
// `1`, and `3`. Requests that specify an invalid value are rejected.
// Any operation that affects conditional role bindings must specify
// version `3`. This requirement applies to the following operations: *
// Getting a policy that includes a conditional role binding * Adding a
// conditional role binding to a policy * Changing a conditional role
// binding in a policy * Removing any role binding, with or without a
// condition, from a policy that includes conditions **Important:** If
// you use IAM Conditions, you must include the `etag` field whenever
// you call `setIamPolicy`. If you omit this field, then IAM allows you
// to overwrite a version `3` policy with a version `1` policy, and all
// of the conditions in the version `3` policy are lost. If a policy
// does not include any conditions, operations on that policy may
// specify any valid version or leave the field unset. To learn which
// resources support conditions in their IAM policies, see the [IAM
// documentation](https://cloud.google.com/iam/help/conditions/resource-p
// olicies).
Version int64 `json:"version,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Bindings") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Bindings") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Policy) MarshalJSON() ([]byte, error) {
type NoMethod Policy
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
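// Editor's note: a sketch of building a policy along the lines of the
// JSON/YAML example above, assuming the Binding type generated earlier in
// this file exposes Role and Members fields. The member strings are
// placeholders, and conditional bindings (which require version 3 and etag
// handling) are omitted here.
func exampleBuildPolicy() *Policy {
	return &Policy{
		Version: 1,
		Bindings: []*Binding{
			{
				Role: "roles/resourcemanager.organizationAdmin",
				Members: []string{
					"user:[email protected]",
					"group:[email protected]",
				},
			},
		},
	}
}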
// RelatedUrl: Metadata for any related URL information
type RelatedUrl struct {
// Label: Label to describe usage of the URL
Label string `json:"label,omitempty"`
// Url: Specific URL to associate with the note
Url string `json:"url,omitempty"`
// ForceSendFields is a list of field names (e.g. "Label") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Label") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *RelatedUrl) MarshalJSON() ([]byte, error) {
type NoMethod RelatedUrl
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// RepoSource: RepoSource describes the location of the source in a
// Google Cloud Source Repository.
type RepoSource struct {
// BranchName: Name of the branch to build.
BranchName string `json:"branchName,omitempty"`
// CommitSha: Explicit commit SHA to build.
CommitSha string `json:"commitSha,omitempty"`
// ProjectId: ID of the project that owns the repo.
ProjectId string `json:"projectId,omitempty"`
// RepoName: Name of the repo.
RepoName string `json:"repoName,omitempty"`
// TagName: Name of the tag to build.
TagName string `json:"tagName,omitempty"`
// ForceSendFields is a list of field names (e.g. "BranchName") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "BranchName") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *RepoSource) MarshalJSON() ([]byte, error) {
type NoMethod RepoSource
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Resource: Resource is an entity that can have metadata. E.g., a
// Docker image.
type Resource struct {
// ContentHash: The hash of the resource content. E.g., the Docker
// digest.
ContentHash *Hash `json:"contentHash,omitempty"`
// Name: The name of the resource. E.g., the name of a Docker image -
// "Debian".
Name string `json:"name,omitempty"`
// Uri: The unique URI of the resource. E.g.,
// "https://gcr.io/project/image@sha256:foo" for a Docker image.
Uri string `json:"uri,omitempty"`
// ForceSendFields is a list of field names (e.g. "ContentHash") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "ContentHash") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Resource) MarshalJSON() ([]byte, error) {
type NoMethod Resource
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// ScanConfig: Indicates various scans and whether they are turned on or
// off.
type ScanConfig struct {
// CreateTime: Output only. The time this scan config was created.
CreateTime string `json:"createTime,omitempty"`
// Description: Output only. A human-readable description of what the
// `ScanConfig` does.
Description string `json:"description,omitempty"`
// Enabled: Indicates whether the Scan is enabled.
Enabled bool `json:"enabled,omitempty"`
// Name: Output only. The name of the ScanConfig in the form
// "projects/{project_id}/scanConfigs/{scan_config_id}".
Name string `json:"name,omitempty"`
// UpdateTime: Output only. The time this scan config was last updated.
UpdateTime string `json:"updateTime,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "CreateTime") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CreateTime") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *ScanConfig) MarshalJSON() ([]byte, error) {
type NoMethod ScanConfig
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// SetIamPolicyRequest: Request message for `SetIamPolicy` method.
type SetIamPolicyRequest struct {
// Policy: REQUIRED: The complete policy to be applied to the
// `resource`. The size of the policy is limited to a few 10s of KB. An
// empty policy is a valid policy but certain Cloud Platform services
// (such as Projects) might reject them.
Policy *Policy `json:"policy,omitempty"`
// ForceSendFields is a list of field names (e.g. "Policy") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Policy") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *SetIamPolicyRequest) MarshalJSON() ([]byte, error) {
type NoMethod SetIamPolicyRequest
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// SeverityCount: The number of occurrences created for a specific
// severity.
type SeverityCount struct {
// Count: The number of occurrences with the severity.
Count int64 `json:"count,omitempty,string"`
// Severity: The severity of the occurrences.
//
// Possible values:
// "SEVERITY_UNSPECIFIED" - Unknown Impact
// "MINIMAL" - Minimal Impact
// "LOW" - Low Impact
// "MEDIUM" - Medium Impact
// "HIGH" - High Impact
// "CRITICAL" - Critical Impact
Severity string `json:"severity,omitempty"`
// ForceSendFields is a list of field names (e.g. "Count") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Count") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *SeverityCount) MarshalJSON() ([]byte, error) {
type NoMethod SeverityCount
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Source: Source describes the location of the source used for the
// build.
type Source struct {
// AdditionalContexts: If provided, some of the source code used for the
// build may be found in these locations, in the case where the source
// repository had multiple remotes or submodules. This list will not
// include the context specified in the context field.
AdditionalContexts []*GoogleDevtoolsContaineranalysisV1alpha1SourceContext `json:"additionalContexts,omitempty"`
// ArtifactStorageSource: If provided, the input binary artifacts for
// the build came from this location.
ArtifactStorageSource *StorageSource `json:"artifactStorageSource,omitempty"`
// Context: If provided, the source code used for the build came from
// this location.
Context *GoogleDevtoolsContaineranalysisV1alpha1SourceContext `json:"context,omitempty"`
// FileHashes: Hash(es) of the build source, which can be used to verify
// that the original source integrity was maintained in the build. The
// keys to this map are file paths used as build source and the values
// contain the hash values for those files. If the build source came in
// a single package such as a gzipped tarfile (.tar.gz), the FileHash
// will be for the single path to that file.
FileHashes map[string]FileHashes `json:"fileHashes,omitempty"`
// RepoSource: If provided, get source from this location in a Cloud
// Repo.
RepoSource *RepoSource `json:"repoSource,omitempty"`
// StorageSource: If provided, get the source from this location in
// Google Cloud Storage.
StorageSource *StorageSource `json:"storageSource,omitempty"`
// ForceSendFields is a list of field names (e.g. "AdditionalContexts")
// to unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AdditionalContexts") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *Source) MarshalJSON() ([]byte, error) {
type NoMethod Source
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
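// Editor's note: a hedged sketch of a Source whose build input is a single
// gzipped tarball in Cloud Storage, with FileHashes keyed by that one path as
// the comment above describes. It assumes the FileHashes and Hash types
// generated earlier in this file expose FileHash, Type, and Value fields; the
// bucket, object, and digest are placeholders.
func exampleSource() *Source {
	return &Source{
		StorageSource: &StorageSource{
			Bucket: "my-build-bucket",
			Object: "source.tar.gz",
		},
		FileHashes: map[string]FileHashes{
			"source.tar.gz": {
				FileHash: []*Hash{
					{Type: "SHA256", Value: "cGxhY2Vob2xkZXItZGlnZXN0"},
				},
			},
		},
	}
}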
// Status: The `Status` type defines a logical error model that is
// suitable for different programming environments, including REST APIs
// and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each
// `Status` message contains three pieces of data: error code, error
// message, and error details. You can find out more about this error
// model and how to work with it in the [API Design
// Guide](https://cloud.google.com/apis/design/errors).
type Status struct {
// Code: The status code, which should be an enum value of
// google.rpc.Code.
Code int64 `json:"code,omitempty"`
// Details: A list of messages that carry the error details. There is a
// common set of message types for APIs to use.
Details []googleapi.RawMessage `json:"details,omitempty"`
// Message: A developer-facing error message, which should be in
// English. Any user-facing error message should be localized and sent
// in the google.rpc.Status.details field, or localized by the client.
Message string `json:"message,omitempty"`
// ForceSendFields is a list of field names (e.g. "Code") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Code") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Status) MarshalJSON() ([]byte, error) {
type NoMethod Status
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// StorageSource: StorageSource describes the location of the source in
// an archive file in Google Cloud Storage.
type StorageSource struct {
// Bucket: Google Cloud Storage bucket containing source (see [Bucket
// Name Requirements]
// (https://cloud.google.com/storage/docs/bucket-naming#requirements)).
Bucket string `json:"bucket,omitempty"`
// Generation: Google Cloud Storage generation for the object.
Generation int64 `json:"generation,omitempty,string"`
// Object: Google Cloud Storage object containing source.
Object string `json:"object,omitempty"`
// ForceSendFields is a list of field names (e.g. "Bucket") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Bucket") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *StorageSource) MarshalJSON() ([]byte, error) {
type NoMethod StorageSource
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// TestIamPermissionsRequest: Request message for `TestIamPermissions`
// method.
type TestIamPermissionsRequest struct {
// Permissions: The set of permissions to check for the `resource`.
// Permissions with wildcards (such as '*' or 'storage.*') are not
// allowed. For more information see [IAM
// Overview](https://cloud.google.com/iam/docs/overview#permissions).
Permissions []string `json:"permissions,omitempty"`
// ForceSendFields is a list of field names (e.g. "Permissions") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Permissions") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *TestIamPermissionsRequest) MarshalJSON() ([]byte, error) {
type NoMethod TestIamPermissionsRequest
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// TestIamPermissionsResponse: Response message for `TestIamPermissions`
// method.
type TestIamPermissionsResponse struct {
// Permissions: A subset of `TestPermissionsRequest.permissions` that
// the caller is allowed.
Permissions []string `json:"permissions,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Permissions") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Permissions") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *TestIamPermissionsResponse) MarshalJSON() ([]byte, error) {
type NoMethod TestIamPermissionsResponse
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// UpdateOperationRequest: Request for updating an existing operation
type UpdateOperationRequest struct {
// Operation: The operation to create.
Operation *Operation `json:"operation,omitempty"`
// UpdateMask: The fields to update.
UpdateMask string `json:"updateMask,omitempty"`
// ForceSendFields is a list of field names (e.g. "Operation") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Operation") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *UpdateOperationRequest) MarshalJSON() ([]byte, error) {
type NoMethod UpdateOperationRequest
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// UpgradeDistribution: The Upgrade Distribution represents metadata
// about the Upgrade for each operating system (CPE). Some distributions
// have additional metadata around updates, classifying them into
// various categories and severities.
type UpgradeDistribution struct {
// Classification: The operating system classification of this Upgrade,
// as specified by the upstream operating system upgrade feed.
Classification string `json:"classification,omitempty"`
// CpeUri: Required - The specific operating system this metadata
// applies to. See https://cpe.mitre.org/specification/.
CpeUri string `json:"cpeUri,omitempty"`
// Cve: The CVEs that would be resolved by this upgrade.
Cve []string `json:"cve,omitempty"`
// Severity: The severity as specified by the upstream operating system.
Severity string `json:"severity,omitempty"`
// ForceSendFields is a list of field names (e.g. "Classification") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Classification") to
// include in API requests with the JSON null value. By default, fields
// with empty values are omitted from API requests. However, any field
// with an empty value appearing in NullFields will be sent to the
// server as null. It is an error if a field in this list has a
// non-empty value. This may be used to include null fields in Patch
// requests.
NullFields []string `json:"-"`
}
func (s *UpgradeDistribution) MarshalJSON() ([]byte, error) {
type NoMethod UpgradeDistribution
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// UpgradeNote: An Upgrade Note represents a potential upgrade of a
// package to a given version. For each package version combination
// (i.e. bash 4.0, bash 4.1, bash 4.1.2), there will be an Upgrade Note.
type UpgradeNote struct {
// Distributions: Metadata about the upgrade for each specific operating
// system.
Distributions []*UpgradeDistribution `json:"distributions,omitempty"`
// Package: Required - The package this Upgrade is for.
Package string `json:"package,omitempty"`
// Version: Required - The version of the package in machine + human
// readable form.
Version *Version `json:"version,omitempty"`
// ForceSendFields is a list of field names (e.g. "Distributions") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Distributions") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *UpgradeNote) MarshalJSON() ([]byte, error) {
type NoMethod UpgradeNote
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// UpgradeOccurrence: An Upgrade Occurrence represents that a specific
// resource_url could install a specific upgrade. This presence is
// supplied via local sources (i.e. it is present in the mirror and the
// running system has noticed its availability).
type UpgradeOccurrence struct {
// Distribution: Metadata about the upgrade available for the specific
// operating system of the resource_url. This allows efficient
// filtering, as well as making it easier to use the occurrence.
Distribution *UpgradeDistribution `json:"distribution,omitempty"`
// Package: Required - The package this Upgrade is for.
Package string `json:"package,omitempty"`
// ParsedVersion: Required - The version of the package in a machine +
// human readable form.
ParsedVersion *Version `json:"parsedVersion,omitempty"`
// ForceSendFields is a list of field names (e.g. "Distribution") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Distribution") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *UpgradeOccurrence) MarshalJSON() ([]byte, error) {
type NoMethod UpgradeOccurrence
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Version: Version contains structured information about the version of
// the package. For a discussion of this in Debian/Ubuntu:
// http://serverfault.com/questions/604541/debian-packages-version-convention For a discussion of this in Redhat/Fedora/Centos:
// http://blog.jasonantman.com/2014/07/how-yum-and-rpm-compare-versions/
type Version struct {
// Epoch: Used to correct mistakes in the version numbering scheme.
Epoch int64 `json:"epoch,omitempty"`
// Kind: Distinguish between sentinel MIN/MAX versions and normal
// versions. If kind is not NORMAL, then the other fields are ignored.
//
// Possible values:
// "NORMAL" - A standard package version, defined by the other fields.
// "MINIMUM" - A special version representing negative infinity, other
// fields are ignored.
// "MAXIMUM" - A special version representing positive infinity, other
// fields are ignored.
Kind string `json:"kind,omitempty"`
// Name: The main part of the version name.
Name string `json:"name,omitempty"`
// Revision: The iteration of the package build from the above version.
Revision string `json:"revision,omitempty"`
// ForceSendFields is a list of field names (e.g. "Epoch") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Epoch") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Version) MarshalJSON() ([]byte, error) {
type NoMethod Version
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
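// Editor's note: a short sketch of the Version shape described above, using a
// made-up Debian-style package version ("2:4.4.18-2") purely for
// illustration.
func exampleVersion() *Version {
	return &Version{
		Epoch:    2,        // the "2:" prefix
		Name:     "4.4.18", // the upstream version
		Revision: "2",      // the package build iteration
		Kind:     "NORMAL", // a regular version, not a MIN/MAX sentinel
	}
}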
// VulnerabilityDetails: Used by Occurrence to point to where the
// vulnerability exists and how to fix it.
type VulnerabilityDetails struct {
// CvssScore: Output only. The CVSS score of this vulnerability. CVSS
// score is on a scale of 0-10 where 0 indicates low severity and 10
// indicates high severity.
CvssScore float64 `json:"cvssScore,omitempty"`
// EffectiveSeverity: The distro-assigned severity for this
// vulnerability when it is available, otherwise the note provider's
// assigned severity when the distro has not yet assigned one for this
// vulnerability.
//
// Possible values:
// "SEVERITY_UNSPECIFIED" - Unknown Impact
// "MINIMAL" - Minimal Impact
// "LOW" - Low Impact
// "MEDIUM" - Medium Impact
// "HIGH" - High Impact
// "CRITICAL" - Critical Impact
EffectiveSeverity string `json:"effectiveSeverity,omitempty"`
// PackageIssue: The set of affected locations and their fixes (if
// available) within the associated resource.
PackageIssue []*PackageIssue `json:"packageIssue,omitempty"`
// Severity: Output only. The note provider assigned Severity of the
// vulnerability.
//
// Possible values:
// "SEVERITY_UNSPECIFIED" - Unknown Impact
// "MINIMAL" - Minimal Impact
// "LOW" - Low Impact
// "MEDIUM" - Medium Impact
// "HIGH" - High Impact
// "CRITICAL" - Critical Impact
Severity string `json:"severity,omitempty"`
// Type: The type of package; whether native or non-native (Ruby gems,
// Node.js packages, etc.).
Type string `json:"type,omitempty"`
// ForceSendFields is a list of field names (e.g. "CvssScore") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CvssScore") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *VulnerabilityDetails) MarshalJSON() ([]byte, error) {
type NoMethod VulnerabilityDetails
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *VulnerabilityDetails) UnmarshalJSON(data []byte) error {
type NoMethod VulnerabilityDetails
var s1 struct {
CvssScore gensupport.JSONFloat64 `json:"cvssScore"`
*NoMethod
}
s1.NoMethod = (*NoMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.CvssScore = float64(s1.CvssScore)
return nil
}
// VulnerabilityLocation: The location of the vulnerability
type VulnerabilityLocation struct {
// CpeUri: The cpe_uri in [CPE format]
// (https://cpe.mitre.org/specification/). Examples include the distro
// or the storage location of a vulnerable jar. This field can be used
// as a filter in list requests.
CpeUri string `json:"cpeUri,omitempty"`
// Package: The package being described.
Package string `json:"package,omitempty"`
// Version: The version of the package being described. This field can
// be used as a filter in list requests.
Version *Version `json:"version,omitempty"`
// ForceSendFields is a list of field names (e.g. "CpeUri") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CpeUri") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *VulnerabilityLocation) MarshalJSON() ([]byte, error) {
type NoMethod VulnerabilityLocation
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// VulnerabilityType: VulnerabilityType provides metadata about a
// security vulnerability.
type VulnerabilityType struct {
// CvssScore: The CVSS score for this Vulnerability.
CvssScore float64 `json:"cvssScore,omitempty"`
// Details: All information about the package to specifically identify
// this vulnerability. One entry per (version range and cpe_uri) the
// package vulnerability has manifested in.
Details []*Detail `json:"details,omitempty"`
// Severity: Note provider assigned impact of the vulnerability
//
// Possible values:
// "SEVERITY_UNSPECIFIED" - Unknown Impact
// "MINIMAL" - Minimal Impact
// "LOW" - Low Impact
// "MEDIUM" - Medium Impact
// "HIGH" - High Impact
// "CRITICAL" - Critical Impact
Severity string `json:"severity,omitempty"`
// ForceSendFields is a list of field names (e.g. "CvssScore") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "CvssScore") to include in
// API requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *VulnerabilityType) MarshalJSON() ([]byte, error) {
type NoMethod VulnerabilityType
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
func (s *VulnerabilityType) UnmarshalJSON(data []byte) error {
type NoMethod VulnerabilityType
var s1 struct {
CvssScore gensupport.JSONFloat64 `json:"cvssScore"`
*NoMethod
}
s1.NoMethod = (*NoMethod)(s)
if err := json.Unmarshal(data, &s1); err != nil {
return err
}
s.CvssScore = float64(s1.CvssScore)
return nil
}
// method id "containeranalysis.projects.notes.create":
type ProjectsNotesCreateCall struct {
s *Service
parent string
note *Note
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Create: Creates a new `Note`.
func (r *ProjectsNotesService) Create(parent string, note *Note) *ProjectsNotesCreateCall {
c := &ProjectsNotesCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.parent = parent
c.note = note
return c
}
// Name sets the optional parameter "name": The name of the project.
// Should be of the form "providers/{provider_id}". @Deprecated
func (c *ProjectsNotesCreateCall) Name(name string) *ProjectsNotesCreateCall {
c.urlParams_.Set("name", name)
return c
}
// NoteId sets the optional parameter "noteId": The ID to use for this
// note.
func (c *ProjectsNotesCreateCall) NoteId(noteId string) *ProjectsNotesCreateCall {
c.urlParams_.Set("noteId", noteId)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsNotesCreateCall) Fields(s ...googleapi.Field) *ProjectsNotesCreateCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsNotesCreateCall) Context(ctx context.Context) *ProjectsNotesCreateCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsNotesCreateCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsNotesCreateCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.note)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+parent}/notes")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"parent": c.parent,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.notes.create" call.
// Exactly one of *Note or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Note.ServerResponse.Header or (if a response was returned at all) in
// error.(*googleapi.Error).Header. Use googleapi.IsNotModified to check
// whether the returned error was because http.StatusNotModified was
// returned.
func (c *ProjectsNotesCreateCall) Do(opts ...googleapi.CallOption) (*Note, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Note{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Creates a new `Note`.",
// "flatPath": "v1alpha1/projects/{projectsId}/notes",
// "httpMethod": "POST",
// "id": "containeranalysis.projects.notes.create",
// "parameterOrder": [
// "parent"
// ],
// "parameters": {
// "name": {
// "description": "The name of the project. Should be of the form \"providers/{provider_id}\". @Deprecated",
// "location": "query",
// "type": "string"
// },
// "noteId": {
// "description": "The ID to use for this note.",
// "location": "query",
// "type": "string"
// },
// "parent": {
// "description": "This field contains the project Id for example: \"projects/{project_id}",
// "location": "path",
// "pattern": "^projects/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+parent}/notes",
// "request": {
// "$ref": "Note"
// },
// "response": {
// "$ref": "Note"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
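// A minimal usage sketch for the Create call above; it is not part of the
// generated API surface. It assumes the Service wiring defined elsewhere in
// this package (svc.Projects.Notes) and uses hypothetical project and note
// IDs.
func exampleProjectsNotesCreate(ctx context.Context, svc *Service) (*Note, error) {
	note := &Note{} // real callers would populate the note's fields here
	return svc.Projects.Notes.Create("projects/my-project", note).
		NoteId("my-note"). // the ID to assign to the new note
		Context(ctx).
		Do()
}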
// method id "containeranalysis.projects.notes.delete":
type ProjectsNotesDeleteCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Delete: Deletes the given `Note` from the system.
func (r *ProjectsNotesService) Delete(name string) *ProjectsNotesDeleteCall {
c := &ProjectsNotesDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsNotesDeleteCall) Fields(s ...googleapi.Field) *ProjectsNotesDeleteCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsNotesDeleteCall) Context(ctx context.Context) *ProjectsNotesDeleteCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsNotesDeleteCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsNotesDeleteCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("DELETE", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.notes.delete" call.
// Exactly one of *Empty or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Empty.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *ProjectsNotesDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Empty{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Deletes the given `Note` from the system.",
// "flatPath": "v1alpha1/projects/{projectsId}/notes/{notesId}",
// "httpMethod": "DELETE",
// "id": "containeranalysis.projects.notes.delete",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the note in the form of \"providers/{provider_id}/notes/{NOTE_ID}\"",
// "location": "path",
// "pattern": "^projects/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "response": {
// "$ref": "Empty"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
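// A minimal usage sketch for the Delete call above (not part of the
// generated surface). The fully qualified note name is a hypothetical
// example, and svc.Projects.Notes assumes the service wiring defined
// elsewhere in this package.
func exampleProjectsNotesDelete(ctx context.Context, svc *Service) error {
	_, err := svc.Projects.Notes.Delete("projects/my-project/notes/my-note").
		Context(ctx).
		Do()
	return err
}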
// method id "containeranalysis.projects.notes.get":
type ProjectsNotesGetCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// Get: Returns the requested `Note`.
func (r *ProjectsNotesService) Get(name string) *ProjectsNotesGetCall {
c := &ProjectsNotesGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsNotesGetCall) Fields(s ...googleapi.Field) *ProjectsNotesGetCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsNotesGetCall) IfNoneMatch(entityTag string) *ProjectsNotesGetCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsNotesGetCall) Context(ctx context.Context) *ProjectsNotesGetCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsNotesGetCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsNotesGetCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.notes.get" call.
// Exactly one of *Note or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Note.ServerResponse.Header or (if a response was returned at all) in
// error.(*googleapi.Error).Header. Use googleapi.IsNotModified to check
// whether the returned error was because http.StatusNotModified was
// returned.
func (c *ProjectsNotesGetCall) Do(opts ...googleapi.CallOption) (*Note, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Note{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Returns the requested `Note`.",
// "flatPath": "v1alpha1/projects/{projectsId}/notes/{notesId}",
// "httpMethod": "GET",
// "id": "containeranalysis.projects.notes.get",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the note in the form of \"providers/{provider_id}/notes/{NOTE_ID}\"",
// "location": "path",
// "pattern": "^projects/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "response": {
// "$ref": "Note"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
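// A minimal sketch showing how IfNoneMatch and googleapi.IsNotModified fit
// together for the Get call above; not part of the generated surface, and
// the note name and ETag value are hypothetical.
func exampleProjectsNotesGet(ctx context.Context, svc *Service, etag string) (*Note, error) {
	note, err := svc.Projects.Notes.Get("projects/my-project/notes/my-note").
		IfNoneMatch(etag).
		Context(ctx).
		Do()
	if googleapi.IsNotModified(err) {
		// The cached copy identified by etag is still current.
		return nil, nil
	}
	return note, err
}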
// method id "containeranalysis.projects.notes.getIamPolicy":
type ProjectsNotesGetIamPolicyCall struct {
s *Service
resource string
getiampolicyrequest *GetIamPolicyRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// GetIamPolicy: Gets the access control policy for a note or an
// `Occurrence` resource. Requires
// `containeranalysis.notes.setIamPolicy` or
// `containeranalysis.occurrences.setIamPolicy` permission if the
// resource is a note or occurrence, respectively. Attempting to call
// this method on a resource without the required permission will result
// in a `PERMISSION_DENIED` error. Attempting to call this method on a
// non-existent resource will result in a `NOT_FOUND` error if the user
// has list permission on the project, or a `PERMISSION_DENIED` error
// otherwise. The resource takes the following formats:
// `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for occurrences
// and projects/{PROJECT_ID}/notes/{NOTE_ID} for notes
func (r *ProjectsNotesService) GetIamPolicy(resource string, getiampolicyrequest *GetIamPolicyRequest) *ProjectsNotesGetIamPolicyCall {
c := &ProjectsNotesGetIamPolicyCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.resource = resource
c.getiampolicyrequest = getiampolicyrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsNotesGetIamPolicyCall) Fields(s ...googleapi.Field) *ProjectsNotesGetIamPolicyCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsNotesGetIamPolicyCall) Context(ctx context.Context) *ProjectsNotesGetIamPolicyCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsNotesGetIamPolicyCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsNotesGetIamPolicyCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.getiampolicyrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+resource}:getIamPolicy")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"resource": c.resource,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.notes.getIamPolicy" call.
// Exactly one of *Policy or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Policy.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *ProjectsNotesGetIamPolicyCall) Do(opts ...googleapi.CallOption) (*Policy, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Policy{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Gets the access control policy for a note or an `Occurrence` resource. Requires `containeranalysis.notes.setIamPolicy` or `containeranalysis.occurrences.setIamPolicy` permission if the resource is a note or occurrence, respectively. Attempting to call this method on a resource without the required permission will result in a `PERMISSION_DENIED` error. Attempting to call this method on a non-existent resource will result in a `NOT_FOUND` error if the user has list permission on the project, or a `PERMISSION_DENIED` error otherwise. The resource takes the following formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for occurrences and projects/{PROJECT_ID}/notes/{NOTE_ID} for notes",
// "flatPath": "v1alpha1/projects/{projectsId}/notes/{notesId}:getIamPolicy",
// "httpMethod": "POST",
// "id": "containeranalysis.projects.notes.getIamPolicy",
// "parameterOrder": [
// "resource"
// ],
// "parameters": {
// "resource": {
// "description": "REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field.",
// "location": "path",
// "pattern": "^projects/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+resource}:getIamPolicy",
// "request": {
// "$ref": "GetIamPolicyRequest"
// },
// "response": {
// "$ref": "Policy"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
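// A minimal usage sketch for the GetIamPolicy call above (not part of the
// generated surface). The empty GetIamPolicyRequest and the note name are
// illustrative; svc.Projects.Notes assumes the service wiring defined
// elsewhere in this package.
func exampleProjectsNotesGetIamPolicy(ctx context.Context, svc *Service) (*Policy, error) {
	return svc.Projects.Notes.GetIamPolicy(
		"projects/my-project/notes/my-note",
		&GetIamPolicyRequest{},
	).Context(ctx).Do()
}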
// method id "containeranalysis.projects.notes.list":
type ProjectsNotesListCall struct {
s *Service
parent string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// List: Lists all `Notes` for a given project.
func (r *ProjectsNotesService) List(parent string) *ProjectsNotesListCall {
c := &ProjectsNotesListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.parent = parent
return c
}
// Filter sets the optional parameter "filter": The filter expression.
func (c *ProjectsNotesListCall) Filter(filter string) *ProjectsNotesListCall {
c.urlParams_.Set("filter", filter)
return c
}
// Name sets the optional parameter "name": The name field will contain
// the project Id for example: "providers/{provider_id} @Deprecated
func (c *ProjectsNotesListCall) Name(name string) *ProjectsNotesListCall {
c.urlParams_.Set("name", name)
return c
}
// PageSize sets the optional parameter "pageSize": Number of notes to
// return in the list.
func (c *ProjectsNotesListCall) PageSize(pageSize int64) *ProjectsNotesListCall {
c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
return c
}
// PageToken sets the optional parameter "pageToken": Token to provide
// to skip to a particular spot in the list.
func (c *ProjectsNotesListCall) PageToken(pageToken string) *ProjectsNotesListCall {
c.urlParams_.Set("pageToken", pageToken)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsNotesListCall) Fields(s ...googleapi.Field) *ProjectsNotesListCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsNotesListCall) IfNoneMatch(entityTag string) *ProjectsNotesListCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsNotesListCall) Context(ctx context.Context) *ProjectsNotesListCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsNotesListCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsNotesListCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+parent}/notes")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"parent": c.parent,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.notes.list" call.
// Exactly one of *ListNotesResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListNotesResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProjectsNotesListCall) Do(opts ...googleapi.CallOption) (*ListNotesResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &ListNotesResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Lists all `Notes` for a given project.",
// "flatPath": "v1alpha1/projects/{projectsId}/notes",
// "httpMethod": "GET",
// "id": "containeranalysis.projects.notes.list",
// "parameterOrder": [
// "parent"
// ],
// "parameters": {
// "filter": {
// "description": "The filter expression.",
// "location": "query",
// "type": "string"
// },
// "name": {
// "description": "The name field will contain the project Id for example: \"providers/{provider_id} @Deprecated",
// "location": "query",
// "type": "string"
// },
// "pageSize": {
// "description": "Number of notes to return in the list.",
// "format": "int32",
// "location": "query",
// "type": "integer"
// },
// "pageToken": {
// "description": "Token to provide to skip to a particular spot in the list.",
// "location": "query",
// "type": "string"
// },
// "parent": {
// "description": "This field contains the project Id for example: \"projects/{PROJECT_ID}\".",
// "location": "path",
// "pattern": "^projects/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+parent}/notes",
// "response": {
// "$ref": "ListNotesResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *ProjectsNotesListCall) Pages(ctx context.Context, f func(*ListNotesResponse) error) error {
c.ctx_ = ctx
defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
for {
x, err := c.Do()
if err != nil {
return err
}
if err := f(x); err != nil {
return err
}
if x.NextPageToken == "" {
return nil
}
c.PageToken(x.NextPageToken)
}
}
// method id "containeranalysis.projects.notes.patch":
type ProjectsNotesPatchCall struct {
s *Service
name string
note *Note
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Patch: Updates an existing `Note`.
func (r *ProjectsNotesService) Patch(name string, note *Note) *ProjectsNotesPatchCall {
c := &ProjectsNotesPatchCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
c.note = note
return c
}
// UpdateMask sets the optional parameter "updateMask": The fields to
// update.
func (c *ProjectsNotesPatchCall) UpdateMask(updateMask string) *ProjectsNotesPatchCall {
c.urlParams_.Set("updateMask", updateMask)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsNotesPatchCall) Fields(s ...googleapi.Field) *ProjectsNotesPatchCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsNotesPatchCall) Context(ctx context.Context) *ProjectsNotesPatchCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsNotesPatchCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsNotesPatchCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.note)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("PATCH", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.notes.patch" call.
// Exactly one of *Note or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Note.ServerResponse.Header or (if a response was returned at all) in
// error.(*googleapi.Error).Header. Use googleapi.IsNotModified to check
// whether the returned error was because http.StatusNotModified was
// returned.
func (c *ProjectsNotesPatchCall) Do(opts ...googleapi.CallOption) (*Note, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Note{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Updates an existing `Note`.",
// "flatPath": "v1alpha1/projects/{projectsId}/notes/{notesId}",
// "httpMethod": "PATCH",
// "id": "containeranalysis.projects.notes.patch",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the note. Should be of the form \"projects/{provider_id}/notes/{note_id}\".",
// "location": "path",
// "pattern": "^projects/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// },
// "updateMask": {
// "description": "The fields to update.",
// "format": "google-fieldmask",
// "location": "query",
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "request": {
// "$ref": "Note"
// },
// "response": {
// "$ref": "Note"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
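// A minimal usage sketch for the Patch call above (not part of the
// generated surface). The note name, the updated note, and the
// "shortDescription" field mask are hypothetical values.
func exampleProjectsNotesPatch(ctx context.Context, svc *Service, updated *Note) (*Note, error) {
	return svc.Projects.Notes.Patch("projects/my-project/notes/my-note", updated).
		UpdateMask("shortDescription"). // only this field is overwritten
		Context(ctx).
		Do()
}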
// method id "containeranalysis.projects.notes.setIamPolicy":
type ProjectsNotesSetIamPolicyCall struct {
s *Service
resource string
setiampolicyrequest *SetIamPolicyRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// SetIamPolicy: Sets the access control policy on the specified `Note`
// or `Occurrence`. Requires `containeranalysis.notes.setIamPolicy` or
// `containeranalysis.occurrences.setIamPolicy` permission if the
// resource is a `Note` or an `Occurrence`, respectively. Attempting to
// call this method without these permissions will result in a
// `PERMISSION_DENIED` error. Attempting to call this method on a
// non-existent resource will result in a `NOT_FOUND` error if the user
// has `containeranalysis.notes.list` permission on a `Note` or
// `containeranalysis.occurrences.list` on an `Occurrence`, or a
// `PERMISSION_DENIED` error otherwise. The resource takes the following
// formats: `projects/{projectid}/occurrences/{occurrenceid}` for
// occurrences and projects/{projectid}/notes/{noteid} for notes
func (r *ProjectsNotesService) SetIamPolicy(resource string, setiampolicyrequest *SetIamPolicyRequest) *ProjectsNotesSetIamPolicyCall {
c := &ProjectsNotesSetIamPolicyCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.resource = resource
c.setiampolicyrequest = setiampolicyrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsNotesSetIamPolicyCall) Fields(s ...googleapi.Field) *ProjectsNotesSetIamPolicyCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsNotesSetIamPolicyCall) Context(ctx context.Context) *ProjectsNotesSetIamPolicyCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsNotesSetIamPolicyCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsNotesSetIamPolicyCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.setiampolicyrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+resource}:setIamPolicy")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"resource": c.resource,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.notes.setIamPolicy" call.
// Exactly one of *Policy or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Policy.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *ProjectsNotesSetIamPolicyCall) Do(opts ...googleapi.CallOption) (*Policy, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Policy{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Sets the access control policy on the specified `Note` or `Occurrence`. Requires `containeranalysis.notes.setIamPolicy` or `containeranalysis.occurrences.setIamPolicy` permission if the resource is a `Note` or an `Occurrence`, respectively. Attempting to call this method without these permissions will result in a ` `PERMISSION_DENIED` error. Attempting to call this method on a non-existent resource will result in a `NOT_FOUND` error if the user has `containeranalysis.notes.list` permission on a `Note` or `containeranalysis.occurrences.list` on an `Occurrence`, or a `PERMISSION_DENIED` error otherwise. The resource takes the following formats: `projects/{projectid}/occurrences/{occurrenceid}` for occurrences and projects/{projectid}/notes/{noteid} for notes",
// "flatPath": "v1alpha1/projects/{projectsId}/notes/{notesId}:setIamPolicy",
// "httpMethod": "POST",
// "id": "containeranalysis.projects.notes.setIamPolicy",
// "parameterOrder": [
// "resource"
// ],
// "parameters": {
// "resource": {
// "description": "REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field.",
// "location": "path",
// "pattern": "^projects/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+resource}:setIamPolicy",
// "request": {
// "$ref": "SetIamPolicyRequest"
// },
// "response": {
// "$ref": "Policy"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
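// A minimal sketch for the SetIamPolicy call above (not part of the
// generated surface). The role and member are hypothetical, and it assumes
// the Policy and Binding types generated elsewhere in this package.
func exampleProjectsNotesSetIamPolicy(ctx context.Context, svc *Service) (*Policy, error) {
	req := &SetIamPolicyRequest{
		Policy: &Policy{
			Bindings: []*Binding{{
				Role:    "roles/containeranalysis.notes.editor", // illustrative role
				Members: []string{"user:jane@example.com"},
			}},
		},
	}
	return svc.Projects.Notes.SetIamPolicy("projects/my-project/notes/my-note", req).
		Context(ctx).
		Do()
}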
// method id "containeranalysis.projects.notes.testIamPermissions":
type ProjectsNotesTestIamPermissionsCall struct {
s *Service
resource string
testiampermissionsrequest *TestIamPermissionsRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// TestIamPermissions: Returns the permissions that a caller has on the
// specified note or occurrence resource. Requires list permission on
// the project (for example, "storage.objects.list" on the containing
// bucket for testing permission of an object). Attempting to call this
// method on a non-existent resource will result in a `NOT_FOUND` error
// if the user has list permission on the project, or a
// `PERMISSION_DENIED` error otherwise. The resource takes the following
// formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for
// `Occurrences` and `projects/{PROJECT_ID}/notes/{NOTE_ID}` for `Notes`
func (r *ProjectsNotesService) TestIamPermissions(resource string, testiampermissionsrequest *TestIamPermissionsRequest) *ProjectsNotesTestIamPermissionsCall {
c := &ProjectsNotesTestIamPermissionsCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.resource = resource
c.testiampermissionsrequest = testiampermissionsrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsNotesTestIamPermissionsCall) Fields(s ...googleapi.Field) *ProjectsNotesTestIamPermissionsCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsNotesTestIamPermissionsCall) Context(ctx context.Context) *ProjectsNotesTestIamPermissionsCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsNotesTestIamPermissionsCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsNotesTestIamPermissionsCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.testiampermissionsrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+resource}:testIamPermissions")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"resource": c.resource,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.notes.testIamPermissions" call.
// Exactly one of *TestIamPermissionsResponse or error will be non-nil.
// Any non-2xx status code is an error. Response headers are in either
// *TestIamPermissionsResponse.ServerResponse.Header or (if a response
// was returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProjectsNotesTestIamPermissionsCall) Do(opts ...googleapi.CallOption) (*TestIamPermissionsResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &TestIamPermissionsResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Returns the permissions that a caller has on the specified note or occurrence resource. Requires list permission on the project (for example, \"storage.objects.list\" on the containing bucket for testing permission of an object). Attempting to call this method on a non-existent resource will result in a `NOT_FOUND` error if the user has list permission on the project, or a `PERMISSION_DENIED` error otherwise. The resource takes the following formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for `Occurrences` and `projects/{PROJECT_ID}/notes/{NOTE_ID}` for `Notes`",
// "flatPath": "v1alpha1/projects/{projectsId}/notes/{notesId}:testIamPermissions",
// "httpMethod": "POST",
// "id": "containeranalysis.projects.notes.testIamPermissions",
// "parameterOrder": [
// "resource"
// ],
// "parameters": {
// "resource": {
// "description": "REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field.",
// "location": "path",
// "pattern": "^projects/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+resource}:testIamPermissions",
// "request": {
// "$ref": "TestIamPermissionsRequest"
// },
// "response": {
// "$ref": "TestIamPermissionsResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
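// A minimal sketch for the TestIamPermissions call above (not part of the
// generated surface). The permission strings and resource name are
// hypothetical examples.
func exampleProjectsNotesTestIamPermissions(ctx context.Context, svc *Service) ([]string, error) {
	req := &TestIamPermissionsRequest{
		Permissions: []string{"containeranalysis.notes.get", "containeranalysis.notes.update"},
	}
	resp, err := svc.Projects.Notes.TestIamPermissions("projects/my-project/notes/my-note", req).
		Context(ctx).
		Do()
	if err != nil {
		return nil, err
	}
	return resp.Permissions, nil // the subset of permissions the caller actually holds
}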
// method id "containeranalysis.projects.notes.occurrences.list":
type ProjectsNotesOccurrencesListCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// List: Lists `Occurrences` referencing the specified `Note`. Use this
// method to get all occurrences referencing your `Note` across all your
// customer projects.
func (r *ProjectsNotesOccurrencesService) List(name string) *ProjectsNotesOccurrencesListCall {
c := &ProjectsNotesOccurrencesListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Filter sets the optional parameter "filter": The filter expression.
func (c *ProjectsNotesOccurrencesListCall) Filter(filter string) *ProjectsNotesOccurrencesListCall {
c.urlParams_.Set("filter", filter)
return c
}
// PageSize sets the optional parameter "pageSize": Number of notes to
// return in the list.
func (c *ProjectsNotesOccurrencesListCall) PageSize(pageSize int64) *ProjectsNotesOccurrencesListCall {
c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
return c
}
// PageToken sets the optional parameter "pageToken": Token to provide
// to skip to a particular spot in the list.
func (c *ProjectsNotesOccurrencesListCall) PageToken(pageToken string) *ProjectsNotesOccurrencesListCall {
c.urlParams_.Set("pageToken", pageToken)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsNotesOccurrencesListCall) Fields(s ...googleapi.Field) *ProjectsNotesOccurrencesListCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsNotesOccurrencesListCall) IfNoneMatch(entityTag string) *ProjectsNotesOccurrencesListCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsNotesOccurrencesListCall) Context(ctx context.Context) *ProjectsNotesOccurrencesListCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsNotesOccurrencesListCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsNotesOccurrencesListCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}/occurrences")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.notes.occurrences.list" call.
// Exactly one of *ListNoteOccurrencesResponse or error will be non-nil.
// Any non-2xx status code is an error. Response headers are in either
// *ListNoteOccurrencesResponse.ServerResponse.Header or (if a response
// was returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProjectsNotesOccurrencesListCall) Do(opts ...googleapi.CallOption) (*ListNoteOccurrencesResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &ListNoteOccurrencesResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Lists `Occurrences` referencing the specified `Note`. Use this method to get all occurrences referencing your `Note` across all your customer projects.",
// "flatPath": "v1alpha1/projects/{projectsId}/notes/{notesId}/occurrences",
// "httpMethod": "GET",
// "id": "containeranalysis.projects.notes.occurrences.list",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "filter": {
// "description": "The filter expression.",
// "location": "query",
// "type": "string"
// },
// "name": {
// "description": "The name field will contain the note name for example: \"provider/{provider_id}/notes/{note_id}\"",
// "location": "path",
// "pattern": "^projects/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// },
// "pageSize": {
// "description": "Number of notes to return in the list.",
// "format": "int32",
// "location": "query",
// "type": "integer"
// },
// "pageToken": {
// "description": "Token to provide to skip to a particular spot in the list.",
// "location": "query",
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}/occurrences",
// "response": {
// "$ref": "ListNoteOccurrencesResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *ProjectsNotesOccurrencesListCall) Pages(ctx context.Context, f func(*ListNoteOccurrencesResponse) error) error {
c.ctx_ = ctx
defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
for {
x, err := c.Do()
if err != nil {
return err
}
if err := f(x); err != nil {
return err
}
if x.NextPageToken == "" {
return nil
}
c.PageToken(x.NextPageToken)
}
}
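// A minimal sketch of listing the occurrences that reference a note, using
// the Pages helper above; not part of the generated surface. The note name
// and filter expression are hypothetical, and it assumes
// ListNoteOccurrencesResponse carries an Occurrences slice as defined
// elsewhere in this package.
func exampleProjectsNotesOccurrencesList(ctx context.Context, svc *Service) (int, error) {
	count := 0
	err := svc.Projects.Notes.Occurrences.List("projects/my-project/notes/my-note").
		Filter(`kind = "PACKAGE_VULNERABILITY"`). // illustrative filter
		Pages(ctx, func(page *ListNoteOccurrencesResponse) error {
			count += len(page.Occurrences)
			return nil
		})
	return count, err
}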
// method id "containeranalysis.projects.occurrences.create":
type ProjectsOccurrencesCreateCall struct {
s *Service
parent string
occurrence *Occurrence
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Create: Creates a new `Occurrence`. Use this method to create
// `Occurrences` for a resource.
func (r *ProjectsOccurrencesService) Create(parent string, occurrence *Occurrence) *ProjectsOccurrencesCreateCall {
c := &ProjectsOccurrencesCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.parent = parent
c.occurrence = occurrence
return c
}
// Name sets the optional parameter "name": The name of the project.
// Should be of the form "projects/{project_id}". @Deprecated
func (c *ProjectsOccurrencesCreateCall) Name(name string) *ProjectsOccurrencesCreateCall {
c.urlParams_.Set("name", name)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesCreateCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesCreateCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesCreateCall) Context(ctx context.Context) *ProjectsOccurrencesCreateCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesCreateCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesCreateCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.occurrence)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+parent}/occurrences")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"parent": c.parent,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.create" call.
// Exactly one of *Occurrence or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Occurrence.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsOccurrencesCreateCall) Do(opts ...googleapi.CallOption) (*Occurrence, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Occurrence{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Creates a new `Occurrence`. Use this method to create `Occurrences` for a resource.",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences",
// "httpMethod": "POST",
// "id": "containeranalysis.projects.occurrences.create",
// "parameterOrder": [
// "parent"
// ],
// "parameters": {
// "name": {
// "description": "The name of the project. Should be of the form \"projects/{project_id}\". @Deprecated",
// "location": "query",
// "type": "string"
// },
// "parent": {
// "description": "This field contains the project Id for example: \"projects/{project_id}\"",
// "location": "path",
// "pattern": "^projects/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+parent}/occurrences",
// "request": {
// "$ref": "Occurrence"
// },
// "response": {
// "$ref": "Occurrence"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
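// A minimal usage sketch for the occurrences Create call above (not part of
// the generated surface). The parent project is hypothetical and the
// occurrence is left empty here; real callers would set its fields,
// including the note it references.
func exampleProjectsOccurrencesCreate(ctx context.Context, svc *Service) (*Occurrence, error) {
	occ := &Occurrence{} // populate NoteName, resource URL, details, etc.
	return svc.Projects.Occurrences.Create("projects/my-project", occ).
		Context(ctx).
		Do()
}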
// method id "containeranalysis.projects.occurrences.delete":
type ProjectsOccurrencesDeleteCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Delete: Deletes the given `Occurrence` from the system. Use this when
// an `Occurrence` is no longer applicable for the given resource.
func (r *ProjectsOccurrencesService) Delete(name string) *ProjectsOccurrencesDeleteCall {
c := &ProjectsOccurrencesDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesDeleteCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesDeleteCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesDeleteCall) Context(ctx context.Context) *ProjectsOccurrencesDeleteCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesDeleteCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesDeleteCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("DELETE", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.delete" call.
// Exactly one of *Empty or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Empty.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *ProjectsOccurrencesDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Empty{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Deletes the given `Occurrence` from the system. Use this when an `Occurrence` is no longer applicable for the given resource.",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}",
// "httpMethod": "DELETE",
// "id": "containeranalysis.projects.occurrences.delete",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the occurrence in the form of \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\"",
// "location": "path",
// "pattern": "^projects/[^/]+/occurrences/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "response": {
// "$ref": "Empty"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// method id "containeranalysis.projects.occurrences.get":
type ProjectsOccurrencesGetCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// Get: Returns the requested `Occurrence`.
func (r *ProjectsOccurrencesService) Get(name string) *ProjectsOccurrencesGetCall {
c := &ProjectsOccurrencesGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesGetCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesGetCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsOccurrencesGetCall) IfNoneMatch(entityTag string) *ProjectsOccurrencesGetCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesGetCall) Context(ctx context.Context) *ProjectsOccurrencesGetCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesGetCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesGetCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.get" call.
// Exactly one of *Occurrence or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Occurrence.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsOccurrencesGetCall) Do(opts ...googleapi.CallOption) (*Occurrence, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Occurrence{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Returns the requested `Occurrence`.",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}",
// "httpMethod": "GET",
// "id": "containeranalysis.projects.occurrences.get",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the occurrence of the form \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\"",
// "location": "path",
// "pattern": "^projects/[^/]+/occurrences/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "response": {
// "$ref": "Occurrence"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
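// exampleGetOccurrence is an illustrative usage sketch, not part of the
// generated API surface. It assumes r is an already-initialized
// *ProjectsOccurrencesService and that name has the form
// "projects/{project_id}/occurrences/{OCCURRENCE_ID}". If entityTag is
// non-empty, the call is made conditional via If-None-Match and a nil
// Occurrence is returned when the stored copy is still current.
func exampleGetOccurrence(ctx context.Context, r *ProjectsOccurrencesService, name, entityTag string) (*Occurrence, error) {
	call := r.Get(name).Context(ctx)
	if entityTag != "" {
		call = call.IfNoneMatch(entityTag)
	}
	occ, err := call.Do()
	if err != nil {
		if googleapi.IsNotModified(err) {
			// The copy identified by entityTag has not changed on the server.
			return nil, nil
		}
		return nil, err
	}
	return occ, nil
}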
// method id "containeranalysis.projects.occurrences.getIamPolicy":
type ProjectsOccurrencesGetIamPolicyCall struct {
s *Service
resource string
getiampolicyrequest *GetIamPolicyRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// GetIamPolicy: Gets the access control policy for a note or an
// `Occurrence` resource. Requires
// `containeranalysis.notes.setIamPolicy` or
// `containeranalysis.occurrences.setIamPolicy` permission if the
// resource is a note or occurrence, respectively. Attempting to call
// this method on a resource without the required permission will result
// in a `PERMISSION_DENIED` error. Attempting to call this method on a
// non-existent resource will result in a `NOT_FOUND` error if the user
// has list permission on the project, or a `PERMISSION_DENIED` error
// otherwise. The resource takes the following formats:
// `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for occurrences
// and projects/{PROJECT_ID}/notes/{NOTE_ID} for notes
func (r *ProjectsOccurrencesService) GetIamPolicy(resource string, getiampolicyrequest *GetIamPolicyRequest) *ProjectsOccurrencesGetIamPolicyCall {
c := &ProjectsOccurrencesGetIamPolicyCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.resource = resource
c.getiampolicyrequest = getiampolicyrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesGetIamPolicyCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesGetIamPolicyCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesGetIamPolicyCall) Context(ctx context.Context) *ProjectsOccurrencesGetIamPolicyCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesGetIamPolicyCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesGetIamPolicyCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.getiampolicyrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+resource}:getIamPolicy")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"resource": c.resource,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.getIamPolicy" call.
// Exactly one of *Policy or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Policy.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *ProjectsOccurrencesGetIamPolicyCall) Do(opts ...googleapi.CallOption) (*Policy, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Policy{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Gets the access control policy for a note or an `Occurrence` resource. Requires `containeranalysis.notes.setIamPolicy` or `containeranalysis.occurrences.setIamPolicy` permission if the resource is a note or occurrence, respectively. Attempting to call this method on a resource without the required permission will result in a `PERMISSION_DENIED` error. Attempting to call this method on a non-existent resource will result in a `NOT_FOUND` error if the user has list permission on the project, or a `PERMISSION_DENIED` error otherwise. The resource takes the following formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for occurrences and projects/{PROJECT_ID}/notes/{NOTE_ID} for notes",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}:getIamPolicy",
// "httpMethod": "POST",
// "id": "containeranalysis.projects.occurrences.getIamPolicy",
// "parameterOrder": [
// "resource"
// ],
// "parameters": {
// "resource": {
// "description": "REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field.",
// "location": "path",
// "pattern": "^projects/[^/]+/occurrences/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+resource}:getIamPolicy",
// "request": {
// "$ref": "GetIamPolicyRequest"
// },
// "response": {
// "$ref": "Policy"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
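// exampleGetOccurrenceIamPolicy is an illustrative sketch, not generated
// code. It assumes r is an already-initialized *ProjectsOccurrencesService
// and resource has the form
// "projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}". An empty
// GetIamPolicyRequest body is used here purely for brevity.
func exampleGetOccurrenceIamPolicy(ctx context.Context, r *ProjectsOccurrencesService, resource string) (*Policy, error) {
	return r.GetIamPolicy(resource, &GetIamPolicyRequest{}).Context(ctx).Do()
}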
// method id "containeranalysis.projects.occurrences.getNotes":
type ProjectsOccurrencesGetNotesCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// GetNotes: Gets the `Note` attached to the given `Occurrence`.
func (r *ProjectsOccurrencesService) GetNotes(name string) *ProjectsOccurrencesGetNotesCall {
c := &ProjectsOccurrencesGetNotesCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesGetNotesCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesGetNotesCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsOccurrencesGetNotesCall) IfNoneMatch(entityTag string) *ProjectsOccurrencesGetNotesCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesGetNotesCall) Context(ctx context.Context) *ProjectsOccurrencesGetNotesCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesGetNotesCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesGetNotesCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}/notes")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.getNotes" call.
// Exactly one of *Note or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Note.ServerResponse.Header or (if a response was returned at all) in
// error.(*googleapi.Error).Header. Use googleapi.IsNotModified to check
// whether the returned error was because http.StatusNotModified was
// returned.
func (c *ProjectsOccurrencesGetNotesCall) Do(opts ...googleapi.CallOption) (*Note, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Note{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Gets the `Note` attached to the given `Occurrence`.",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}/notes",
// "httpMethod": "GET",
// "id": "containeranalysis.projects.occurrences.getNotes",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the occurrence in the form \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\"",
// "location": "path",
// "pattern": "^projects/[^/]+/occurrences/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}/notes",
// "response": {
// "$ref": "Note"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
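// exampleGetOccurrenceNote is an illustrative sketch, not generated code.
// It assumes r is an already-initialized *ProjectsOccurrencesService and
// name has the form "projects/{project_id}/occurrences/{OCCURRENCE_ID}";
// the returned Note is the one the occurrence is attached to.
func exampleGetOccurrenceNote(ctx context.Context, r *ProjectsOccurrencesService, name string) (*Note, error) {
	return r.GetNotes(name).Context(ctx).Do()
}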
// method id "containeranalysis.projects.occurrences.getVulnerabilitySummary":
type ProjectsOccurrencesGetVulnerabilitySummaryCall struct {
s *Service
parent string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// GetVulnerabilitySummary: Gets a summary of the number and severity of
// occurrences.
func (r *ProjectsOccurrencesService) GetVulnerabilitySummary(parent string) *ProjectsOccurrencesGetVulnerabilitySummaryCall {
c := &ProjectsOccurrencesGetVulnerabilitySummaryCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.parent = parent
return c
}
// Filter sets the optional parameter "filter": The filter expression.
func (c *ProjectsOccurrencesGetVulnerabilitySummaryCall) Filter(filter string) *ProjectsOccurrencesGetVulnerabilitySummaryCall {
c.urlParams_.Set("filter", filter)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesGetVulnerabilitySummaryCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesGetVulnerabilitySummaryCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsOccurrencesGetVulnerabilitySummaryCall) IfNoneMatch(entityTag string) *ProjectsOccurrencesGetVulnerabilitySummaryCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesGetVulnerabilitySummaryCall) Context(ctx context.Context) *ProjectsOccurrencesGetVulnerabilitySummaryCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesGetVulnerabilitySummaryCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesGetVulnerabilitySummaryCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+parent}/occurrences:vulnerabilitySummary")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"parent": c.parent,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.getVulnerabilitySummary" call.
// Exactly one of *GetVulnzOccurrencesSummaryResponse or error will be
// non-nil. Any non-2xx status code is an error. Response headers are in
// either *GetVulnzOccurrencesSummaryResponse.ServerResponse.Header or
// (if a response was returned at all) in
// error.(*googleapi.Error).Header. Use googleapi.IsNotModified to check
// whether the returned error was because http.StatusNotModified was
// returned.
func (c *ProjectsOccurrencesGetVulnerabilitySummaryCall) Do(opts ...googleapi.CallOption) (*GetVulnzOccurrencesSummaryResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &GetVulnzOccurrencesSummaryResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Gets a summary of the number and severity of occurrences.",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences:vulnerabilitySummary",
// "httpMethod": "GET",
// "id": "containeranalysis.projects.occurrences.getVulnerabilitySummary",
// "parameterOrder": [
// "parent"
// ],
// "parameters": {
// "filter": {
// "description": "The filter expression.",
// "location": "query",
// "type": "string"
// },
// "parent": {
// "description": "This contains the project Id for example: projects/{project_id}",
// "location": "path",
// "pattern": "^projects/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+parent}/occurrences:vulnerabilitySummary",
// "response": {
// "$ref": "GetVulnzOccurrencesSummaryResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
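// exampleVulnerabilitySummary is an illustrative sketch, not generated
// code. It assumes r is an already-initialized *ProjectsOccurrencesService,
// parent has the form "projects/{project_id}", and filter is an occurrence
// filter expression (pass "" to summarize all vulnerability occurrences).
func exampleVulnerabilitySummary(ctx context.Context, r *ProjectsOccurrencesService, parent, filter string) (*GetVulnzOccurrencesSummaryResponse, error) {
	call := r.GetVulnerabilitySummary(parent).Context(ctx)
	if filter != "" {
		call = call.Filter(filter)
	}
	return call.Do()
}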
// method id "containeranalysis.projects.occurrences.list":
type ProjectsOccurrencesListCall struct {
s *Service
parent string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// List: Lists active `Occurrences` for a given project matching the
// filters.
func (r *ProjectsOccurrencesService) List(parent string) *ProjectsOccurrencesListCall {
c := &ProjectsOccurrencesListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.parent = parent
return c
}
// Filter sets the optional parameter "filter": The filter expression.
func (c *ProjectsOccurrencesListCall) Filter(filter string) *ProjectsOccurrencesListCall {
c.urlParams_.Set("filter", filter)
return c
}
// Kind sets the optional parameter "kind": The kind of occurrences to
// filter on.
//
// Possible values:
// "KIND_UNSPECIFIED" - Unknown
// "PACKAGE_VULNERABILITY" - The note and occurrence represent a
// package vulnerability.
// "BUILD_DETAILS" - The note and occurrence assert build provenance.
// "IMAGE_BASIS" - This represents an image basis relationship.
// "PACKAGE_MANAGER" - This represents a package installed via a
// package manager.
// "DEPLOYABLE" - The note and occurrence track deployment events.
// "DISCOVERY" - The note and occurrence track the initial discovery
// status of a resource.
// "ATTESTATION_AUTHORITY" - This represents a logical "role" that can
// attest to artifacts.
// "UPGRADE" - This represents an available software upgrade.
func (c *ProjectsOccurrencesListCall) Kind(kind string) *ProjectsOccurrencesListCall {
c.urlParams_.Set("kind", kind)
return c
}
// Name sets the optional parameter "name": The name field contains the
// project Id. For example: "projects/{project_id}". @Deprecated
func (c *ProjectsOccurrencesListCall) Name(name string) *ProjectsOccurrencesListCall {
c.urlParams_.Set("name", name)
return c
}
// PageSize sets the optional parameter "pageSize": Number of
// occurrences to return in the list.
func (c *ProjectsOccurrencesListCall) PageSize(pageSize int64) *ProjectsOccurrencesListCall {
c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
return c
}
// PageToken sets the optional parameter "pageToken": Token to provide
// to skip to a particular spot in the list.
func (c *ProjectsOccurrencesListCall) PageToken(pageToken string) *ProjectsOccurrencesListCall {
c.urlParams_.Set("pageToken", pageToken)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesListCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesListCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsOccurrencesListCall) IfNoneMatch(entityTag string) *ProjectsOccurrencesListCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesListCall) Context(ctx context.Context) *ProjectsOccurrencesListCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesListCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesListCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+parent}/occurrences")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"parent": c.parent,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.list" call.
// Exactly one of *ListOccurrencesResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListOccurrencesResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProjectsOccurrencesListCall) Do(opts ...googleapi.CallOption) (*ListOccurrencesResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &ListOccurrencesResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Lists active `Occurrences` for a given project matching the filters.",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences",
// "httpMethod": "GET",
// "id": "containeranalysis.projects.occurrences.list",
// "parameterOrder": [
// "parent"
// ],
// "parameters": {
// "filter": {
// "description": "The filter expression.",
// "location": "query",
// "type": "string"
// },
// "kind": {
// "description": "The kind of occurrences to filter on.",
// "enum": [
// "KIND_UNSPECIFIED",
// "PACKAGE_VULNERABILITY",
// "BUILD_DETAILS",
// "IMAGE_BASIS",
// "PACKAGE_MANAGER",
// "DEPLOYABLE",
// "DISCOVERY",
// "ATTESTATION_AUTHORITY",
// "UPGRADE"
// ],
// "enumDescriptions": [
// "Unknown",
// "The note and occurrence represent a package vulnerability.",
// "The note and occurrence assert build provenance.",
// "This represents an image basis relationship.",
// "This represents a package installed via a package manager.",
// "The note and occurrence track deployment events.",
// "The note and occurrence track the initial discovery status of a resource.",
// "This represents a logical \"role\" that can attest to artifacts.",
// "This represents an available software upgrade."
// ],
// "location": "query",
// "type": "string"
// },
// "name": {
// "description": "The name field contains the project Id. For example: \"projects/{project_id} @Deprecated",
// "location": "query",
// "type": "string"
// },
// "pageSize": {
// "description": "Number of occurrences to return in the list.",
// "format": "int32",
// "location": "query",
// "type": "integer"
// },
// "pageToken": {
// "description": "Token to provide to skip to a particular spot in the list.",
// "location": "query",
// "type": "string"
// },
// "parent": {
// "description": "This contains the project Id for example: projects/{project_id}.",
// "location": "path",
// "pattern": "^projects/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+parent}/occurrences",
// "response": {
// "$ref": "ListOccurrencesResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *ProjectsOccurrencesListCall) Pages(ctx context.Context, f func(*ListOccurrencesResponse) error) error {
c.ctx_ = ctx
defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
for {
x, err := c.Do()
if err != nil {
return err
}
if err := f(x); err != nil {
return err
}
if x.NextPageToken == "" {
return nil
}
c.PageToken(x.NextPageToken)
}
}
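// exampleListOccurrences is an illustrative sketch, not generated code. It
// streams every page of results to the caller-supplied handle function,
// letting Pages follow NextPageToken automatically. It assumes r is an
// already-initialized *ProjectsOccurrencesService and parent has the form
// "projects/{project_id}"; the page size of 100 is an arbitrary choice.
func exampleListOccurrences(ctx context.Context, r *ProjectsOccurrencesService, parent string, handle func(*ListOccurrencesResponse) error) error {
	return r.List(parent).PageSize(100).Pages(ctx, handle)
}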
// method id "containeranalysis.projects.occurrences.patch":
type ProjectsOccurrencesPatchCall struct {
s *Service
name string
occurrence *Occurrence
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Patch: Updates an existing occurrence.
func (r *ProjectsOccurrencesService) Patch(name string, occurrence *Occurrence) *ProjectsOccurrencesPatchCall {
c := &ProjectsOccurrencesPatchCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
c.occurrence = occurrence
return c
}
// UpdateMask sets the optional parameter "updateMask": The fields to
// update.
func (c *ProjectsOccurrencesPatchCall) UpdateMask(updateMask string) *ProjectsOccurrencesPatchCall {
c.urlParams_.Set("updateMask", updateMask)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesPatchCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesPatchCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesPatchCall) Context(ctx context.Context) *ProjectsOccurrencesPatchCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesPatchCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesPatchCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.occurrence)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("PATCH", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.patch" call.
// Exactly one of *Occurrence or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Occurrence.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsOccurrencesPatchCall) Do(opts ...googleapi.CallOption) (*Occurrence, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Occurrence{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Updates an existing occurrence.",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}",
// "httpMethod": "PATCH",
// "id": "containeranalysis.projects.occurrences.patch",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the occurrence. Should be of the form \"projects/{project_id}/occurrences/{OCCURRENCE_ID}\".",
// "location": "path",
// "pattern": "^projects/[^/]+/occurrences/[^/]+$",
// "required": true,
// "type": "string"
// },
// "updateMask": {
// "description": "The fields to update.",
// "format": "google-fieldmask",
// "location": "query",
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "request": {
// "$ref": "Occurrence"
// },
// "response": {
// "$ref": "Occurrence"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
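// examplePatchOccurrence is an illustrative sketch, not generated code. It
// assumes r is an already-initialized *ProjectsOccurrencesService, name has
// the form "projects/{project_id}/occurrences/{OCCURRENCE_ID}", occ carries
// the new field values, and updateMask is a google-fieldmask string naming
// the fields to change.
func examplePatchOccurrence(ctx context.Context, r *ProjectsOccurrencesService, name string, occ *Occurrence, updateMask string) (*Occurrence, error) {
	return r.Patch(name, occ).UpdateMask(updateMask).Context(ctx).Do()
}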
// method id "containeranalysis.projects.occurrences.setIamPolicy":
type ProjectsOccurrencesSetIamPolicyCall struct {
s *Service
resource string
setiampolicyrequest *SetIamPolicyRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// SetIamPolicy: Sets the access control policy on the specified `Note`
// or `Occurrence`. Requires `containeranalysis.notes.setIamPolicy` or
// `containeranalysis.occurrences.setIamPolicy` permission if the
// resource is a `Note` or an `Occurrence`, respectively. Attempting to
// call this method without these permissions will result in a
// `PERMISSION_DENIED` error. Attempting to call this method on a
// non-existent resource will result in a `NOT_FOUND` error if the user
// has `containeranalysis.notes.list` permission on a `Note` or
// `containeranalysis.occurrences.list` on an `Occurrence`, or a
// `PERMISSION_DENIED` error otherwise. The resource takes the following
// formats: `projects/{projectid}/occurrences/{occurrenceid}` for
// occurrences and projects/{projectid}/notes/{noteid} for notes
func (r *ProjectsOccurrencesService) SetIamPolicy(resource string, setiampolicyrequest *SetIamPolicyRequest) *ProjectsOccurrencesSetIamPolicyCall {
c := &ProjectsOccurrencesSetIamPolicyCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.resource = resource
c.setiampolicyrequest = setiampolicyrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesSetIamPolicyCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesSetIamPolicyCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesSetIamPolicyCall) Context(ctx context.Context) *ProjectsOccurrencesSetIamPolicyCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesSetIamPolicyCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesSetIamPolicyCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.setiampolicyrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+resource}:setIamPolicy")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"resource": c.resource,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.setIamPolicy" call.
// Exactly one of *Policy or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Policy.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *ProjectsOccurrencesSetIamPolicyCall) Do(opts ...googleapi.CallOption) (*Policy, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Policy{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Sets the access control policy on the specified `Note` or `Occurrence`. Requires `containeranalysis.notes.setIamPolicy` or `containeranalysis.occurrences.setIamPolicy` permission if the resource is a `Note` or an `Occurrence`, respectively. Attempting to call this method without these permissions will result in a ` `PERMISSION_DENIED` error. Attempting to call this method on a non-existent resource will result in a `NOT_FOUND` error if the user has `containeranalysis.notes.list` permission on a `Note` or `containeranalysis.occurrences.list` on an `Occurrence`, or a `PERMISSION_DENIED` error otherwise. The resource takes the following formats: `projects/{projectid}/occurrences/{occurrenceid}` for occurrences and projects/{projectid}/notes/{noteid} for notes",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}:setIamPolicy",
// "httpMethod": "POST",
// "id": "containeranalysis.projects.occurrences.setIamPolicy",
// "parameterOrder": [
// "resource"
// ],
// "parameters": {
// "resource": {
// "description": "REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field.",
// "location": "path",
// "pattern": "^projects/[^/]+/occurrences/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+resource}:setIamPolicy",
// "request": {
// "$ref": "SetIamPolicyRequest"
// },
// "response": {
// "$ref": "Policy"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
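// exampleSetOccurrenceIamPolicy is an illustrative sketch, not generated
// code. It assumes r is an already-initialized *ProjectsOccurrencesService,
// resource has the form "projects/{projectid}/occurrences/{occurrenceid}",
// and req already carries the desired Policy to install.
func exampleSetOccurrenceIamPolicy(ctx context.Context, r *ProjectsOccurrencesService, resource string, req *SetIamPolicyRequest) (*Policy, error) {
	return r.SetIamPolicy(resource, req).Context(ctx).Do()
}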
// method id "containeranalysis.projects.occurrences.testIamPermissions":
type ProjectsOccurrencesTestIamPermissionsCall struct {
s *Service
resource string
testiampermissionsrequest *TestIamPermissionsRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// TestIamPermissions: Returns the permissions that a caller has on the
// specified note or occurrence resource. Requires list permission on
// the project (for example, "storage.objects.list" on the containing
// bucket for testing permission of an object). Attempting to call this
// method on a non-existent resource will result in a `NOT_FOUND` error
// if the user has list permission on the project, or a
// `PERMISSION_DENIED` error otherwise. The resource takes the following
// formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for
// `Occurrences` and `projects/{PROJECT_ID}/notes/{NOTE_ID}` for `Notes`
func (r *ProjectsOccurrencesService) TestIamPermissions(resource string, testiampermissionsrequest *TestIamPermissionsRequest) *ProjectsOccurrencesTestIamPermissionsCall {
c := &ProjectsOccurrencesTestIamPermissionsCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.resource = resource
c.testiampermissionsrequest = testiampermissionsrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOccurrencesTestIamPermissionsCall) Fields(s ...googleapi.Field) *ProjectsOccurrencesTestIamPermissionsCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOccurrencesTestIamPermissionsCall) Context(ctx context.Context) *ProjectsOccurrencesTestIamPermissionsCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOccurrencesTestIamPermissionsCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOccurrencesTestIamPermissionsCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.testiampermissionsrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+resource}:testIamPermissions")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"resource": c.resource,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.occurrences.testIamPermissions" call.
// Exactly one of *TestIamPermissionsResponse or error will be non-nil.
// Any non-2xx status code is an error. Response headers are in either
// *TestIamPermissionsResponse.ServerResponse.Header or (if a response
// was returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProjectsOccurrencesTestIamPermissionsCall) Do(opts ...googleapi.CallOption) (*TestIamPermissionsResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &TestIamPermissionsResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Returns the permissions that a caller has on the specified note or occurrence resource. Requires list permission on the project (for example, \"storage.objects.list\" on the containing bucket for testing permission of an object). Attempting to call this method on a non-existent resource will result in a `NOT_FOUND` error if the user has list permission on the project, or a `PERMISSION_DENIED` error otherwise. The resource takes the following formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for `Occurrences` and `projects/{PROJECT_ID}/notes/{NOTE_ID}` for `Notes`",
// "flatPath": "v1alpha1/projects/{projectsId}/occurrences/{occurrencesId}:testIamPermissions",
// "httpMethod": "POST",
// "id": "containeranalysis.projects.occurrences.testIamPermissions",
// "parameterOrder": [
// "resource"
// ],
// "parameters": {
// "resource": {
// "description": "REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field.",
// "location": "path",
// "pattern": "^projects/[^/]+/occurrences/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+resource}:testIamPermissions",
// "request": {
// "$ref": "TestIamPermissionsRequest"
// },
// "response": {
// "$ref": "TestIamPermissionsResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
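// exampleTestOccurrencePermissions is an illustrative sketch, not generated
// code. It assumes r is an already-initialized *ProjectsOccurrencesService,
// resource has the form "projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}",
// and req lists the permissions to probe on that resource.
func exampleTestOccurrencePermissions(ctx context.Context, r *ProjectsOccurrencesService, resource string, req *TestIamPermissionsRequest) (*TestIamPermissionsResponse, error) {
	return r.TestIamPermissions(resource, req).Context(ctx).Do()
}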
// method id "containeranalysis.projects.operations.create":
type ProjectsOperationsCreateCall struct {
s *Service
parent string
createoperationrequest *CreateOperationRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Create: Creates a new `Operation`.
func (r *ProjectsOperationsService) Create(parent string, createoperationrequest *CreateOperationRequest) *ProjectsOperationsCreateCall {
c := &ProjectsOperationsCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.parent = parent
c.createoperationrequest = createoperationrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOperationsCreateCall) Fields(s ...googleapi.Field) *ProjectsOperationsCreateCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOperationsCreateCall) Context(ctx context.Context) *ProjectsOperationsCreateCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOperationsCreateCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOperationsCreateCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.createoperationrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+parent}/operations")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"parent": c.parent,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.operations.create" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsOperationsCreateCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Operation{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Creates a new `Operation`.",
// "flatPath": "v1alpha1/projects/{projectsId}/operations",
// "httpMethod": "POST",
// "id": "containeranalysis.projects.operations.create",
// "parameterOrder": [
// "parent"
// ],
// "parameters": {
// "parent": {
// "description": "The project Id that this operation should be created under.",
// "location": "path",
// "pattern": "^projects/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+parent}/operations",
// "request": {
// "$ref": "CreateOperationRequest"
// },
// "response": {
// "$ref": "Operation"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
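// exampleCreateOperation is an illustrative sketch, not generated code. It
// assumes r is an already-initialized *ProjectsOperationsService, parent has
// the form "projects/{project_id}", and req wraps the Operation to create.
func exampleCreateOperation(ctx context.Context, r *ProjectsOperationsService, parent string, req *CreateOperationRequest) (*Operation, error) {
	return r.Create(parent, req).Context(ctx).Do()
}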
// method id "containeranalysis.projects.operations.patch":
type ProjectsOperationsPatchCall struct {
s *Service
name string
updateoperationrequest *UpdateOperationRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Patch: Updates an existing operation; returns an error if the operation
// does not exist. The only valid updates are to mark the done bit and to
// change the result.
func (r *ProjectsOperationsService) Patch(name string, updateoperationrequest *UpdateOperationRequest) *ProjectsOperationsPatchCall {
c := &ProjectsOperationsPatchCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
c.updateoperationrequest = updateoperationrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsOperationsPatchCall) Fields(s ...googleapi.Field) *ProjectsOperationsPatchCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsOperationsPatchCall) Context(ctx context.Context) *ProjectsOperationsPatchCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsOperationsPatchCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsOperationsPatchCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.updateoperationrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("PATCH", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.operations.patch" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsOperationsPatchCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Operation{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Updates an existing operation returns an error if operation does not exist. The only valid operations are to update mark the done bit change the result.",
// "flatPath": "v1alpha1/projects/{projectsId}/operations/{operationsId}",
// "httpMethod": "PATCH",
// "id": "containeranalysis.projects.operations.patch",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the Operation. Should be of the form \"projects/{provider_id}/operations/{operation_id}\".",
// "location": "path",
// "pattern": "^projects/[^/]+/operations/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "request": {
// "$ref": "UpdateOperationRequest"
// },
// "response": {
// "$ref": "Operation"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
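// examplePatchOperation is an illustrative sketch, not generated code. It
// assumes r is an already-initialized *ProjectsOperationsService, name has
// the form "projects/{provider_id}/operations/{operation_id}", and req
// describes the done-bit / result update to apply.
func examplePatchOperation(ctx context.Context, r *ProjectsOperationsService, name string, req *UpdateOperationRequest) (*Operation, error) {
	return r.Patch(name, req).Context(ctx).Do()
}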
// method id "containeranalysis.projects.scanConfigs.get":
type ProjectsScanConfigsGetCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// Get: Gets a specific scan configuration for a project.
func (r *ProjectsScanConfigsService) Get(name string) *ProjectsScanConfigsGetCall {
c := &ProjectsScanConfigsGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsScanConfigsGetCall) Fields(s ...googleapi.Field) *ProjectsScanConfigsGetCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsScanConfigsGetCall) IfNoneMatch(entityTag string) *ProjectsScanConfigsGetCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsScanConfigsGetCall) Context(ctx context.Context) *ProjectsScanConfigsGetCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsScanConfigsGetCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsScanConfigsGetCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.scanConfigs.get" call.
// Exactly one of *ScanConfig or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *ScanConfig.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsScanConfigsGetCall) Do(opts ...googleapi.CallOption) (*ScanConfig, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &ScanConfig{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Gets a specific scan configuration for a project.",
// "flatPath": "v1alpha1/projects/{projectsId}/scanConfigs/{scanConfigsId}",
// "httpMethod": "GET",
// "id": "containeranalysis.projects.scanConfigs.get",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the ScanConfig in the form projects/{project_id}/scanConfigs/{scan_config_id}",
// "location": "path",
// "pattern": "^projects/[^/]+/scanConfigs/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "response": {
// "$ref": "ScanConfig"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
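// exampleGetScanConfig is an illustrative sketch, not generated code. It
// assumes r is an already-initialized *ProjectsScanConfigsService and name
// has the form "projects/{project_id}/scanConfigs/{scan_config_id}".
func exampleGetScanConfig(ctx context.Context, r *ProjectsScanConfigsService, name string) (*ScanConfig, error) {
	return r.Get(name).Context(ctx).Do()
}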
// method id "containeranalysis.projects.scanConfigs.list":
type ProjectsScanConfigsListCall struct {
s *Service
parent string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// List: Lists scan configurations for a project.
func (r *ProjectsScanConfigsService) List(parent string) *ProjectsScanConfigsListCall {
c := &ProjectsScanConfigsListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.parent = parent
return c
}
// Filter sets the optional parameter "filter": The filter expression.
func (c *ProjectsScanConfigsListCall) Filter(filter string) *ProjectsScanConfigsListCall {
c.urlParams_.Set("filter", filter)
return c
}
// PageSize sets the optional parameter "pageSize": The number of items
// to return.
func (c *ProjectsScanConfigsListCall) PageSize(pageSize int64) *ProjectsScanConfigsListCall {
c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
return c
}
// PageToken sets the optional parameter "pageToken": The page token to
// use for the next request.
func (c *ProjectsScanConfigsListCall) PageToken(pageToken string) *ProjectsScanConfigsListCall {
c.urlParams_.Set("pageToken", pageToken)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsScanConfigsListCall) Fields(s ...googleapi.Field) *ProjectsScanConfigsListCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsScanConfigsListCall) IfNoneMatch(entityTag string) *ProjectsScanConfigsListCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsScanConfigsListCall) Context(ctx context.Context) *ProjectsScanConfigsListCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsScanConfigsListCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsScanConfigsListCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+parent}/scanConfigs")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"parent": c.parent,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.scanConfigs.list" call.
// Exactly one of *ListScanConfigsResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListScanConfigsResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProjectsScanConfigsListCall) Do(opts ...googleapi.CallOption) (*ListScanConfigsResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &ListScanConfigsResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Lists scan configurations for a project.",
// "flatPath": "v1alpha1/projects/{projectsId}/scanConfigs",
// "httpMethod": "GET",
// "id": "containeranalysis.projects.scanConfigs.list",
// "parameterOrder": [
// "parent"
// ],
// "parameters": {
// "filter": {
// "description": "The filter expression.",
// "location": "query",
// "type": "string"
// },
// "pageSize": {
// "description": "The number of items to return.",
// "format": "int32",
// "location": "query",
// "type": "integer"
// },
// "pageToken": {
// "description": "The page token to use for the next request.",
// "location": "query",
// "type": "string"
// },
// "parent": {
// "description": "This containers the project Id i.e.: projects/{project_id}",
// "location": "path",
// "pattern": "^projects/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+parent}/scanConfigs",
// "response": {
// "$ref": "ListScanConfigsResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *ProjectsScanConfigsListCall) Pages(ctx context.Context, f func(*ListScanConfigsResponse) error) error {
c.ctx_ = ctx
defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
for {
x, err := c.Do()
if err != nil {
return err
}
if err := f(x); err != nil {
return err
}
if x.NextPageToken == "" {
return nil
}
c.PageToken(x.NextPageToken)
}
}
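// The sketch below illustrates one way to drive Pages; it is not part of the
// generated surface, and the service setup, project name, and response field
// names are assumptions for the example:
//
//   svc, err := containeranalysis.NewService(ctx)
//   if err != nil {
//       // handle the error
//   }
//   err = svc.Projects.ScanConfigs.List("projects/my-project").
//       Pages(ctx, func(page *containeranalysis.ListScanConfigsResponse) error {
//           for _, sc := range page.ScanConfigs {
//               fmt.Println(sc.Name)
//           }
//           return nil
//       })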
// method id "containeranalysis.projects.scanConfigs.patch":
type ProjectsScanConfigsPatchCall struct {
s *Service
name string
scanconfig *ScanConfig
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Patch: Updates the scan configuration to a new value.
func (r *ProjectsScanConfigsService) Patch(name string, scanconfig *ScanConfig) *ProjectsScanConfigsPatchCall {
c := &ProjectsScanConfigsPatchCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
c.scanconfig = scanconfig
return c
}
// UpdateMask sets the optional parameter "updateMask": The fields to
// update.
func (c *ProjectsScanConfigsPatchCall) UpdateMask(updateMask string) *ProjectsScanConfigsPatchCall {
c.urlParams_.Set("updateMask", updateMask)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProjectsScanConfigsPatchCall) Fields(s ...googleapi.Field) *ProjectsScanConfigsPatchCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProjectsScanConfigsPatchCall) Context(ctx context.Context) *ProjectsScanConfigsPatchCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProjectsScanConfigsPatchCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProjectsScanConfigsPatchCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.scanconfig)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("PATCH", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.projects.scanConfigs.patch" call.
// Exactly one of *ScanConfig or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *ScanConfig.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsScanConfigsPatchCall) Do(opts ...googleapi.CallOption) (*ScanConfig, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &ScanConfig{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Updates the scan configuration to a new value.",
// "flatPath": "v1alpha1/projects/{projectsId}/scanConfigs/{scanConfigsId}",
// "httpMethod": "PATCH",
// "id": "containeranalysis.projects.scanConfigs.patch",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The scan config to update of the form projects/{project_id}/scanConfigs/{scan_config_id}.",
// "location": "path",
// "pattern": "^projects/[^/]+/scanConfigs/[^/]+$",
// "required": true,
// "type": "string"
// },
// "updateMask": {
// "description": "The fields to update.",
// "format": "google-fieldmask",
// "location": "query",
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "request": {
// "$ref": "ScanConfig"
// },
// "response": {
// "$ref": "ScanConfig"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
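// A sketch of a partial update using an update mask; everything other than the
// generated identifiers (the project name, the ScanConfig field, and the mask
// value) is an assumption for the example:
//
//   updated, err := svc.Projects.ScanConfigs.
//       Patch("projects/my-project/scanConfigs/my-config", &containeranalysis.ScanConfig{Enabled: true}).
//       UpdateMask("enabled").
//       Do()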
// method id "containeranalysis.providers.notes.create":
type ProvidersNotesCreateCall struct {
s *Service
name string
note *Note
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Create: Creates a new `Note`.
func (r *ProvidersNotesService) Create(name string, note *Note) *ProvidersNotesCreateCall {
c := &ProvidersNotesCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
c.note = note
return c
}
// NoteId sets the optional parameter "noteId": The ID to use for this
// note.
func (c *ProvidersNotesCreateCall) NoteId(noteId string) *ProvidersNotesCreateCall {
c.urlParams_.Set("noteId", noteId)
return c
}
// Parent sets the optional parameter "parent": This field contains the
// project Id for example: "projects/{project_id}
func (c *ProvidersNotesCreateCall) Parent(parent string) *ProvidersNotesCreateCall {
c.urlParams_.Set("parent", parent)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProvidersNotesCreateCall) Fields(s ...googleapi.Field) *ProvidersNotesCreateCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProvidersNotesCreateCall) Context(ctx context.Context) *ProvidersNotesCreateCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProvidersNotesCreateCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProvidersNotesCreateCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.note)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}/notes")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.providers.notes.create" call.
// Exactly one of *Note or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Note.ServerResponse.Header or (if a response was returned at all) in
// error.(*googleapi.Error).Header. Use googleapi.IsNotModified to check
// whether the returned error was because http.StatusNotModified was
// returned.
func (c *ProvidersNotesCreateCall) Do(opts ...googleapi.CallOption) (*Note, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Note{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Creates a new `Note`.",
// "flatPath": "v1alpha1/providers/{providersId}/notes",
// "httpMethod": "POST",
// "id": "containeranalysis.providers.notes.create",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the project. Should be of the form \"providers/{provider_id}\". @Deprecated",
// "location": "path",
// "pattern": "^providers/[^/]+$",
// "required": true,
// "type": "string"
// },
// "noteId": {
// "description": "The ID to use for this note.",
// "location": "query",
// "type": "string"
// },
// "parent": {
// "description": "This field contains the project Id for example: \"projects/{project_id}",
// "location": "query",
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}/notes",
// "request": {
// "$ref": "Note"
// },
// "response": {
// "$ref": "Note"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// method id "containeranalysis.providers.notes.delete":
type ProvidersNotesDeleteCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Delete: Deletes the given `Note` from the system.
func (r *ProvidersNotesService) Delete(name string) *ProvidersNotesDeleteCall {
c := &ProvidersNotesDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProvidersNotesDeleteCall) Fields(s ...googleapi.Field) *ProvidersNotesDeleteCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProvidersNotesDeleteCall) Context(ctx context.Context) *ProvidersNotesDeleteCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProvidersNotesDeleteCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProvidersNotesDeleteCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("DELETE", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.providers.notes.delete" call.
// Exactly one of *Empty or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Empty.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *ProvidersNotesDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Empty{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Deletes the given `Note` from the system.",
// "flatPath": "v1alpha1/providers/{providersId}/notes/{notesId}",
// "httpMethod": "DELETE",
// "id": "containeranalysis.providers.notes.delete",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the note in the form of \"providers/{provider_id}/notes/{NOTE_ID}\"",
// "location": "path",
// "pattern": "^providers/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "response": {
// "$ref": "Empty"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// method id "containeranalysis.providers.notes.get":
type ProvidersNotesGetCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// Get: Returns the requested `Note`.
func (r *ProvidersNotesService) Get(name string) *ProvidersNotesGetCall {
c := &ProvidersNotesGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProvidersNotesGetCall) Fields(s ...googleapi.Field) *ProvidersNotesGetCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProvidersNotesGetCall) IfNoneMatch(entityTag string) *ProvidersNotesGetCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProvidersNotesGetCall) Context(ctx context.Context) *ProvidersNotesGetCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProvidersNotesGetCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProvidersNotesGetCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.providers.notes.get" call.
// Exactly one of *Note or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Note.ServerResponse.Header or (if a response was returned at all) in
// error.(*googleapi.Error).Header. Use googleapi.IsNotModified to check
// whether the returned error was because http.StatusNotModified was
// returned.
func (c *ProvidersNotesGetCall) Do(opts ...googleapi.CallOption) (*Note, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Note{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Returns the requested `Note`.",
// "flatPath": "v1alpha1/providers/{providersId}/notes/{notesId}",
// "httpMethod": "GET",
// "id": "containeranalysis.providers.notes.get",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the note in the form of \"providers/{provider_id}/notes/{NOTE_ID}\"",
// "location": "path",
// "pattern": "^providers/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "response": {
// "$ref": "Note"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
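// A sketch of a conditional fetch with IfNoneMatch; the service variable and
// the stored ETag are assumptions for the example:
//
//   note, err := svc.Providers.Notes.
//       Get("providers/my-provider/notes/my-note").
//       IfNoneMatch(previousETag).
//       Do()
//   if googleapi.IsNotModified(err) {
//       // The cached copy is still current; keep using it.
//   }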
// method id "containeranalysis.providers.notes.getIamPolicy":
type ProvidersNotesGetIamPolicyCall struct {
s *Service
resource string
getiampolicyrequest *GetIamPolicyRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// GetIamPolicy: Gets the access control policy for a note or an
// `Occurrence` resource. Requires
// `containeranalysis.notes.setIamPolicy` or
// `containeranalysis.occurrences.setIamPolicy` permission if the
// resource is a note or occurrence, respectively. Attempting to call
// this method on a resource without the required permission will result
// in a `PERMISSION_DENIED` error. Attempting to call this method on a
// non-existent resource will result in a `NOT_FOUND` error if the user
// has list permission on the project, or a `PERMISSION_DENIED` error
// otherwise. The resource takes the following formats:
// `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for occurrences
// and projects/{PROJECT_ID}/notes/{NOTE_ID} for notes
func (r *ProvidersNotesService) GetIamPolicy(resource string, getiampolicyrequest *GetIamPolicyRequest) *ProvidersNotesGetIamPolicyCall {
c := &ProvidersNotesGetIamPolicyCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.resource = resource
c.getiampolicyrequest = getiampolicyrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProvidersNotesGetIamPolicyCall) Fields(s ...googleapi.Field) *ProvidersNotesGetIamPolicyCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProvidersNotesGetIamPolicyCall) Context(ctx context.Context) *ProvidersNotesGetIamPolicyCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProvidersNotesGetIamPolicyCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProvidersNotesGetIamPolicyCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.getiampolicyrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+resource}:getIamPolicy")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"resource": c.resource,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.providers.notes.getIamPolicy" call.
// Exactly one of *Policy or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Policy.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *ProvidersNotesGetIamPolicyCall) Do(opts ...googleapi.CallOption) (*Policy, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Policy{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Gets the access control policy for a note or an `Occurrence` resource. Requires `containeranalysis.notes.setIamPolicy` or `containeranalysis.occurrences.setIamPolicy` permission if the resource is a note or occurrence, respectively. Attempting to call this method on a resource without the required permission will result in a `PERMISSION_DENIED` error. Attempting to call this method on a non-existent resource will result in a `NOT_FOUND` error if the user has list permission on the project, or a `PERMISSION_DENIED` error otherwise. The resource takes the following formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for occurrences and projects/{PROJECT_ID}/notes/{NOTE_ID} for notes",
// "flatPath": "v1alpha1/providers/{providersId}/notes/{notesId}:getIamPolicy",
// "httpMethod": "POST",
// "id": "containeranalysis.providers.notes.getIamPolicy",
// "parameterOrder": [
// "resource"
// ],
// "parameters": {
// "resource": {
// "description": "REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field.",
// "location": "path",
// "pattern": "^providers/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+resource}:getIamPolicy",
// "request": {
// "$ref": "GetIamPolicyRequest"
// },
// "response": {
// "$ref": "Policy"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
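// A sketch of reading a note's IAM policy; identifiers outside the generated
// surface (the service variable and the resource name) are assumptions:
//
//   policy, err := svc.Providers.Notes.
//       GetIamPolicy("providers/my-provider/notes/my-note", &containeranalysis.GetIamPolicyRequest{}).
//       Do()
//   if err == nil {
//       for _, b := range policy.Bindings {
//           fmt.Println(b.Role, b.Members)
//       }
//   }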
// method id "containeranalysis.providers.notes.list":
type ProvidersNotesListCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// List: Lists all `Notes` for a given project.
func (r *ProvidersNotesService) List(name string) *ProvidersNotesListCall {
c := &ProvidersNotesListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Filter sets the optional parameter "filter": The filter expression.
func (c *ProvidersNotesListCall) Filter(filter string) *ProvidersNotesListCall {
c.urlParams_.Set("filter", filter)
return c
}
// PageSize sets the optional parameter "pageSize": Number of notes to
// return in the list.
func (c *ProvidersNotesListCall) PageSize(pageSize int64) *ProvidersNotesListCall {
c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
return c
}
// PageToken sets the optional parameter "pageToken": Token to provide
// to skip to a particular spot in the list.
func (c *ProvidersNotesListCall) PageToken(pageToken string) *ProvidersNotesListCall {
c.urlParams_.Set("pageToken", pageToken)
return c
}
// Parent sets the optional parameter "parent": This field contains the
// project Id for example: "projects/{PROJECT_ID}".
func (c *ProvidersNotesListCall) Parent(parent string) *ProvidersNotesListCall {
c.urlParams_.Set("parent", parent)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProvidersNotesListCall) Fields(s ...googleapi.Field) *ProvidersNotesListCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProvidersNotesListCall) IfNoneMatch(entityTag string) *ProvidersNotesListCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProvidersNotesListCall) Context(ctx context.Context) *ProvidersNotesListCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProvidersNotesListCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProvidersNotesListCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}/notes")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.providers.notes.list" call.
// Exactly one of *ListNotesResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListNotesResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProvidersNotesListCall) Do(opts ...googleapi.CallOption) (*ListNotesResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &ListNotesResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Lists all `Notes` for a given project.",
// "flatPath": "v1alpha1/providers/{providersId}/notes",
// "httpMethod": "GET",
// "id": "containeranalysis.providers.notes.list",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "filter": {
// "description": "The filter expression.",
// "location": "query",
// "type": "string"
// },
// "name": {
// "description": "The name field will contain the project Id for example: \"providers/{provider_id} @Deprecated",
// "location": "path",
// "pattern": "^providers/[^/]+$",
// "required": true,
// "type": "string"
// },
// "pageSize": {
// "description": "Number of notes to return in the list.",
// "format": "int32",
// "location": "query",
// "type": "integer"
// },
// "pageToken": {
// "description": "Token to provide to skip to a particular spot in the list.",
// "location": "query",
// "type": "string"
// },
// "parent": {
// "description": "This field contains the project Id for example: \"projects/{PROJECT_ID}\".",
// "location": "query",
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}/notes",
// "response": {
// "$ref": "ListNotesResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *ProvidersNotesListCall) Pages(ctx context.Context, f func(*ListNotesResponse) error) error {
c.ctx_ = ctx
defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
for {
x, err := c.Do()
if err != nil {
return err
}
if err := f(x); err != nil {
return err
}
if x.NextPageToken == "" {
return nil
}
c.PageToken(x.NextPageToken)
}
}
// method id "containeranalysis.providers.notes.patch":
type ProvidersNotesPatchCall struct {
s *Service
name string
note *Note
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Patch: Updates an existing `Note`.
func (r *ProvidersNotesService) Patch(name string, note *Note) *ProvidersNotesPatchCall {
c := &ProvidersNotesPatchCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
c.note = note
return c
}
// UpdateMask sets the optional parameter "updateMask": The fields to
// update.
func (c *ProvidersNotesPatchCall) UpdateMask(updateMask string) *ProvidersNotesPatchCall {
c.urlParams_.Set("updateMask", updateMask)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProvidersNotesPatchCall) Fields(s ...googleapi.Field) *ProvidersNotesPatchCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProvidersNotesPatchCall) Context(ctx context.Context) *ProvidersNotesPatchCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProvidersNotesPatchCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProvidersNotesPatchCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.note)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("PATCH", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.providers.notes.patch" call.
// Exactly one of *Note or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Note.ServerResponse.Header or (if a response was returned at all) in
// error.(*googleapi.Error).Header. Use googleapi.IsNotModified to check
// whether the returned error was because http.StatusNotModified was
// returned.
func (c *ProvidersNotesPatchCall) Do(opts ...googleapi.CallOption) (*Note, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Note{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Updates an existing `Note`.",
// "flatPath": "v1alpha1/providers/{providersId}/notes/{notesId}",
// "httpMethod": "PATCH",
// "id": "containeranalysis.providers.notes.patch",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "The name of the note. Should be of the form \"projects/{provider_id}/notes/{note_id}\".",
// "location": "path",
// "pattern": "^providers/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// },
// "updateMask": {
// "description": "The fields to update.",
// "format": "google-fieldmask",
// "location": "query",
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "request": {
// "$ref": "Note"
// },
// "response": {
// "$ref": "Note"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// method id "containeranalysis.providers.notes.setIamPolicy":
type ProvidersNotesSetIamPolicyCall struct {
s *Service
resource string
setiampolicyrequest *SetIamPolicyRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// SetIamPolicy: Sets the access control policy on the specified `Note`
// or `Occurrence`. Requires `containeranalysis.notes.setIamPolicy` or
// `containeranalysis.occurrences.setIamPolicy` permission if the
// resource is a `Note` or an `Occurrence`, respectively. Attempting to
// call this method without these permissions will result in a
// `PERMISSION_DENIED` error. Attempting to call this method on a
// non-existent resource will result in a `NOT_FOUND` error if the user
// has `containeranalysis.notes.list` permission on a `Note` or
// `containeranalysis.occurrences.list` on an `Occurrence`, or a
// `PERMISSION_DENIED` error otherwise. The resource takes the following
// formats: `projects/{projectid}/occurrences/{occurrenceid}` for
// occurrences and projects/{projectid}/notes/{noteid} for notes
func (r *ProvidersNotesService) SetIamPolicy(resource string, setiampolicyrequest *SetIamPolicyRequest) *ProvidersNotesSetIamPolicyCall {
c := &ProvidersNotesSetIamPolicyCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.resource = resource
c.setiampolicyrequest = setiampolicyrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProvidersNotesSetIamPolicyCall) Fields(s ...googleapi.Field) *ProvidersNotesSetIamPolicyCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProvidersNotesSetIamPolicyCall) Context(ctx context.Context) *ProvidersNotesSetIamPolicyCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProvidersNotesSetIamPolicyCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProvidersNotesSetIamPolicyCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.setiampolicyrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+resource}:setIamPolicy")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"resource": c.resource,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.providers.notes.setIamPolicy" call.
// Exactly one of *Policy or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Policy.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *ProvidersNotesSetIamPolicyCall) Do(opts ...googleapi.CallOption) (*Policy, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Policy{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Sets the access control policy on the specified `Note` or `Occurrence`. Requires `containeranalysis.notes.setIamPolicy` or `containeranalysis.occurrences.setIamPolicy` permission if the resource is a `Note` or an `Occurrence`, respectively. Attempting to call this method without these permissions will result in a ` `PERMISSION_DENIED` error. Attempting to call this method on a non-existent resource will result in a `NOT_FOUND` error if the user has `containeranalysis.notes.list` permission on a `Note` or `containeranalysis.occurrences.list` on an `Occurrence`, or a `PERMISSION_DENIED` error otherwise. The resource takes the following formats: `projects/{projectid}/occurrences/{occurrenceid}` for occurrences and projects/{projectid}/notes/{noteid} for notes",
// "flatPath": "v1alpha1/providers/{providersId}/notes/{notesId}:setIamPolicy",
// "httpMethod": "POST",
// "id": "containeranalysis.providers.notes.setIamPolicy",
// "parameterOrder": [
// "resource"
// ],
// "parameters": {
// "resource": {
// "description": "REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field.",
// "location": "path",
// "pattern": "^providers/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+resource}:setIamPolicy",
// "request": {
// "$ref": "SetIamPolicyRequest"
// },
// "response": {
// "$ref": "Policy"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// method id "containeranalysis.providers.notes.testIamPermissions":
type ProvidersNotesTestIamPermissionsCall struct {
s *Service
resource string
testiampermissionsrequest *TestIamPermissionsRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// TestIamPermissions: Returns the permissions that a caller has on the
// specified note or occurrence resource. Requires list permission on
// the project (for example, "storage.objects.list" on the containing
// bucket for testing permission of an object). Attempting to call this
// method on a non-existent resource will result in a `NOT_FOUND` error
// if the user has list permission on the project, or a
// `PERMISSION_DENIED` error otherwise. The resource takes the following
// formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for
// `Occurrences` and `projects/{PROJECT_ID}/notes/{NOTE_ID}` for `Notes`
func (r *ProvidersNotesService) TestIamPermissions(resource string, testiampermissionsrequest *TestIamPermissionsRequest) *ProvidersNotesTestIamPermissionsCall {
c := &ProvidersNotesTestIamPermissionsCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.resource = resource
c.testiampermissionsrequest = testiampermissionsrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProvidersNotesTestIamPermissionsCall) Fields(s ...googleapi.Field) *ProvidersNotesTestIamPermissionsCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProvidersNotesTestIamPermissionsCall) Context(ctx context.Context) *ProvidersNotesTestIamPermissionsCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProvidersNotesTestIamPermissionsCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProvidersNotesTestIamPermissionsCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.testiampermissionsrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+resource}:testIamPermissions")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"resource": c.resource,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.providers.notes.testIamPermissions" call.
// Exactly one of *TestIamPermissionsResponse or error will be non-nil.
// Any non-2xx status code is an error. Response headers are in either
// *TestIamPermissionsResponse.ServerResponse.Header or (if a response
// was returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProvidersNotesTestIamPermissionsCall) Do(opts ...googleapi.CallOption) (*TestIamPermissionsResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &TestIamPermissionsResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Returns the permissions that a caller has on the specified note or occurrence resource. Requires list permission on the project (for example, \"storage.objects.list\" on the containing bucket for testing permission of an object). Attempting to call this method on a non-existent resource will result in a `NOT_FOUND` error if the user has list permission on the project, or a `PERMISSION_DENIED` error otherwise. The resource takes the following formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for `Occurrences` and `projects/{PROJECT_ID}/notes/{NOTE_ID}` for `Notes`",
// "flatPath": "v1alpha1/providers/{providersId}/notes/{notesId}:testIamPermissions",
// "httpMethod": "POST",
// "id": "containeranalysis.providers.notes.testIamPermissions",
// "parameterOrder": [
// "resource"
// ],
// "parameters": {
// "resource": {
// "description": "REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field.",
// "location": "path",
// "pattern": "^providers/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+resource}:testIamPermissions",
// "request": {
// "$ref": "TestIamPermissionsRequest"
// },
// "response": {
// "$ref": "TestIamPermissionsResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// method id "containeranalysis.providers.notes.occurrences.list":
type ProvidersNotesOccurrencesListCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// List: Lists `Occurrences` referencing the specified `Note`. Use this
// method to get all occurrences referencing your `Note` across all your
// customer projects.
func (r *ProvidersNotesOccurrencesService) List(name string) *ProvidersNotesOccurrencesListCall {
c := &ProvidersNotesOccurrencesListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Filter sets the optional parameter "filter": The filter expression.
func (c *ProvidersNotesOccurrencesListCall) Filter(filter string) *ProvidersNotesOccurrencesListCall {
c.urlParams_.Set("filter", filter)
return c
}
// PageSize sets the optional parameter "pageSize": Number of notes to
// return in the list.
func (c *ProvidersNotesOccurrencesListCall) PageSize(pageSize int64) *ProvidersNotesOccurrencesListCall {
c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
return c
}
// PageToken sets the optional parameter "pageToken": Token to provide
// to skip to a particular spot in the list.
func (c *ProvidersNotesOccurrencesListCall) PageToken(pageToken string) *ProvidersNotesOccurrencesListCall {
c.urlParams_.Set("pageToken", pageToken)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *ProvidersNotesOccurrencesListCall) Fields(s ...googleapi.Field) *ProvidersNotesOccurrencesListCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProvidersNotesOccurrencesListCall) IfNoneMatch(entityTag string) *ProvidersNotesOccurrencesListCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *ProvidersNotesOccurrencesListCall) Context(ctx context.Context) *ProvidersNotesOccurrencesListCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *ProvidersNotesOccurrencesListCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *ProvidersNotesOccurrencesListCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20201211")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}/occurrences")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "containeranalysis.providers.notes.occurrences.list" call.
// Exactly one of *ListNoteOccurrencesResponse or error will be non-nil.
// Any non-2xx status code is an error. Response headers are in either
// *ListNoteOccurrencesResponse.ServerResponse.Header or (if a response
// was returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProvidersNotesOccurrencesListCall) Do(opts ...googleapi.CallOption) (*ListNoteOccurrencesResponse, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &ListNoteOccurrencesResponse{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Lists `Occurrences` referencing the specified `Note`. Use this method to get all occurrences referencing your `Note` across all your customer projects.",
// "flatPath": "v1alpha1/providers/{providersId}/notes/{notesId}/occurrences",
// "httpMethod": "GET",
// "id": "containeranalysis.providers.notes.occurrences.list",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "filter": {
// "description": "The filter expression.",
// "location": "query",
// "type": "string"
// },
// "name": {
// "description": "The name field will contain the note name for example: \"provider/{provider_id}/notes/{note_id}\"",
// "location": "path",
// "pattern": "^providers/[^/]+/notes/[^/]+$",
// "required": true,
// "type": "string"
// },
// "pageSize": {
// "description": "Number of notes to return in the list.",
// "format": "int32",
// "location": "query",
// "type": "integer"
// },
// "pageToken": {
// "description": "Token to provide to skip to a particular spot in the list.",
// "location": "query",
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}/occurrences",
// "response": {
// "$ref": "ListNoteOccurrencesResponse"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *ProvidersNotesOccurrencesListCall) Pages(ctx context.Context, f func(*ListNoteOccurrencesResponse) error) error {
c.ctx_ = ctx
defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
for {
x, err := c.Do()
if err != nil {
return err
}
if err := f(x); err != nil {
return err
}
if x.NextPageToken == "" {
return nil
}
c.PageToken(x.NextPageToken)
}
}
|
//
// Possible values:
// "NONE" - No hash requested.
// "SHA256" - A sha256 hash.
|
heap_meter.rs
|
use std::borrow::Borrow;
use heapsize_::HeapSizeOf;
use super::Meter;
/// Size limit based on the heap size of each cache item.
///
/// Requires cache entries that implement [`HeapSizeOf`][1].
///
/// [1]: https://doc.servo.org/heapsize/trait.HeapSizeOf.html
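///
/// A minimal usage sketch, assuming this meter lives in the `lru-cache` crate
/// and that `LruCache::with_meter` is available (both are assumptions here):
///
/// ```ignore
/// use lru_cache::LruCache;
///
/// // Cap the cache at roughly 100 KB of heap instead of a fixed entry count.
/// let mut cache = LruCache::with_meter(100_000, HeapSize);
/// cache.insert("key".to_string(), vec![0u8; 1024]);
/// ```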
pub struct HeapSize;
impl<K, V: HeapSizeOf> Meter<K, V> for HeapSize {
type Measure = usize;
fn measure<Q: ?Sized>(&self, _: &Q, item: &V) -> usize
where
K: Borrow<Q>,
|
}
|
{
item.heap_size_of_children() + ::std::mem::size_of::<V>()
}
|
operator.js
|
const Blockly = require("blockly");
Blockly.Blocks['op_greatherthan'] = {
init: function() {
this.appendDummyInput()
.appendField(">");
this.setOutput(true, "Operator");
this.setColour(210);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks['op_greatherorequal'] = {
init: function() {
this.appendDummyInput()
.appendField(">=");
this.setOutput(true, "Operator");
this.setColour(210);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks['op_lesserthan'] = {
init: function() {
this.appendDummyInput()
.appendField("<");
this.setOutput(true, "Operator");
this.setColour(210);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks['op_lesserorequal'] = {
|
this.setColour(210);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks['op_equal'] = {
init: function() {
this.appendDummyInput()
.appendField("=");
this.setOutput(true, "Operator");
this.setColour(210);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.JavaScript['op_greatherthan'] = function(block) {
let code = 'new dompp.GreaterThan()';
return [code, Blockly.JavaScript.ORDER_NONE];
};
Blockly.JavaScript['op_greatherorequal'] = function(block) {
let code = 'new dompp.GreaterOrEqual()';
return [code, Blockly.JavaScript.ORDER_NONE];
};
Blockly.JavaScript['op_lesserthan'] = function(block) {
let code = 'new dompp.LesserThan()';
return [code, Blockly.JavaScript.ORDER_NONE];
};
Blockly.JavaScript['op_lesserorequal'] = function(block) {
let code = 'new dompp.LesserOrEqual()';
return [code, Blockly.JavaScript.ORDER_NONE];
};
Blockly.JavaScript['op_equal'] = function(block) {
let code = 'new dompp.IsEqualTo()';
return [code, Blockly.JavaScript.ORDER_NONE];
};
|
init: function() {
this.appendDummyInput()
.appendField("<=");
this.setOutput(true, "Operator");
|
index.js
|
let Promise = require("bluebird")
const mongo = Promise.promisifyAll(require('mongodb').MongoClient)
let lexr = require('./token/tokenizer.js')
let _ = require("lodash-node")
const config = require("../../config")
const url = config.connection.url
/**
 * Tokenizer for Ukrainian-language texts.
 * Converts the input string into an array of UK, LAT, INT, PUNCT, EOL, OTHER tokens. Ignores all whitespace.
* @module tokenizer
*/
/**
 * Returns an array of tokens enriched with vesum interpretation.
 * @param {string} input - The input string.
 * @return {Promise} A promise that resolves to the array of tokens.
*/
module.exports = input => {
let tokenizer = new lexr("");
|
tokenizer.ignoreWhiteSpace();
tokenizer.ignoreNewLine();
let output = tokenizer.tokenize(input).map( o => ({
token: (o.token == "UK1") ? "UK" : o.token,
value: o.value.replace("’","'"),
span:{
start:o.start,
length:o.value.length
}
}));
return new Promise((resolve, reject) => {
mongo.connect(url, {
useNewUrlParser: true,
useUnifiedTopology: true
})
.then( client => {
const db = client.db(config.connection.db)
const collection = db.collection(config.connection.collection)
Promise.all(
output.map( (o, index) => (o.token == "UK")
? Promise.all([
collection.find({word:o.value/*.toLowerCase()*/}).toArray().then( items => items),
collection.find({word:o.value.toLowerCase()}).toArray().then( items => items)
]
).then( res => {
o.interpretation = _.flatten(res).map( item => ({
word: item.word,
mainForm: item.mainForm,
tags: _.zipObject(item.tags,_.fill(Array(item.tags.length), true)),
tone: item.tone || 0
}))
})
: new Promise(resolve => {resolve()})
)
).then( () => {
client.close()
output = output.map( o => {
o.tone = (o.interpretation && o.interpretation[0]) ? o.interpretation[0].tone : 0
return o
})
resolve(output)
})
})
})
}
|
tokenizer.addTokenSet(require("./tokens"));
|
saml_token_info.go
|
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
package wstrust
type SamlAssertionType int
const (
SamlV1 SamlAssertionType = iota
SamlV2
|
type SamlTokenInfo struct {
AssertionType SamlAssertionType
Assertion string
}
func createSamlTokenInfo(assertionType SamlAssertionType, assertion string) *SamlTokenInfo {
tokenInfo := &SamlTokenInfo{assertionType, assertion}
return tokenInfo
}
|
)
|
ir.rs
|
use crate::builtin::env::RispEnv;
use crate::ir::llvm_type::int32_type;
use crate::ir::operate::{build_alloca, build_int32_value, build_load, build_store, call_function};
use crate::ir::string::codegen_string;
use crate::{LLVMValueWrapper, RispCallback, RispErr, RispExp};
use llvm_sys::LLVMValue;
use std::io;
use std::ptr::null_mut;
#[allow(dead_code)]
pub fn wrap_llvm_value(value: f64, llvm_ref: LLVMValueWrapper) -> RispExp {
RispExp::Number(value, llvm_ref)
}
#[allow(dead_code)]
pub fn
|
(exp: &RispExp) -> *mut LLVMValue {
match *exp {
RispExp::Number(_f, (llvm_ref, _)) => llvm_ref,
_ => panic!("failed to unwrap object: {}", exp),
}
}
pub fn load_llvm_value(env: &RispEnv, exp: &RispExp) -> *mut LLVMValue {
match exp {
RispExp::Number(_, value_ref) => {
if !value_ref.1 {
build_load(env.llvm_builder, value_ref.0, "")
} else {
value_ref.0
}
}
        _ => null_mut(),
}
}
pub fn eval_number(env: &RispEnv, f: f64) -> RispExp {
let llvm_input = build_alloca(env.llvm_builder, int32_type(), "");
build_store(env.llvm_builder, build_int32_value(f), llvm_input);
// let llvm_input = build_load(env.llvm_builder, llvm_input, "");
RispExp::Number(f, (llvm_input, false))
}
// printf keyword
pub fn eval_printf_fn(
env: &mut RispEnv,
func: &RispCallback,
args_eval: Result<Vec<RispExp>, RispErr>,
) -> Result<RispExp, RispErr> {
let args_eval = args_eval?;
let first_arg = args_eval.first();
let mut llvm_val: *mut LLVMValue = null_mut();
// emit IR
if let Some(RispExp::Number(_, llvm_ref)) = first_arg {
let llvm_ref = *llvm_ref;
let printf = env.built_ins["printf"];
let llvm_value = build_alloca(env.llvm_builder, int32_type(), "");
build_store(env.llvm_builder, llvm_ref.0, llvm_value);
let print_int = build_load(env.llvm_builder, llvm_value, "");
let printf_args = vec![codegen_string(env, "Result: %d\n", ""), print_int];
call_function(env.llvm_builder, printf, printf_args, "");
llvm_val = llvm_ref.0;
}
// eval print
func(env, &args_eval, Some(llvm_val))
}
// input keyword
pub fn eval_input_fn(env: &mut RispEnv, func: &RispCallback) -> Result<RispExp, RispErr> {
let input_fn = env.built_ins["input"];
let llvm_input = build_alloca(env.llvm_builder, int32_type(), "input");
let input_args = vec![codegen_string(env, "%u", ""), llvm_input];
// emit IR
call_function(env.llvm_builder, input_fn, input_args, "");
// eval
func(env, &[], Some(llvm_input))
}
// arithmetic
pub fn eval_arithmetic(
env: &mut RispEnv,
_op: &str,
func: &RispCallback,
args_eval: Result<Vec<RispExp>, RispErr>,
) -> Result<RispExp, RispErr> {
let risp_args = args_eval.unwrap();
func(env, &risp_args, None)
}
// utils
fn parse_single_float(exp: &RispExp) -> Result<f64, RispErr> {
match exp {
RispExp::Number(num, _) => Ok(*num),
_ => Err(RispErr::Reason("expected a number".to_string())),
}
}
pub fn get_input(prompt: &str) -> String {
println!("{}", prompt);
let mut input = String::new();
match io::stdin().read_line(&mut input) {
Ok(_) => {}
Err(_) => {}
}
input.trim().to_string()
}
pub fn parse_list_of_floats(args: &[RispExp]) -> Result<Vec<f64>, RispErr> {
args.iter().map(|x| parse_single_float(x)).collect()
}
|
unwrap_object
|
index.ts
|
* @param base64String your Base64 VAPID public key
*/
export default function urlBase64ToUint8Array(
base64String: string,
): Uint8Array {
return toUint8Array(base64String, atob);
}
|
import { toUint8Array } from './toUint8Array';
/**
* Converts the given VAPID public key to an Uint8Array.
|
|
urls.py
|
"""djangoBlog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/4.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from users import views as user_views
urlpatterns = [
path('admin/', admin.site.urls),
path('register/', user_views.register, name='register'),
path('profile/', user_views.user_profile, name='profile'),
path('login/', auth_views.LoginView.as_view(template_name='users/login.html'), name='login'),
path('logout/', auth_views.LogoutView.as_view(template_name='users/logout.html'), name='logout'),
path('', include('blog.urls')),
]
if settings.DEBUG:
|
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
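The module docstring above lists the three ways of wiring URLs, and urlpatterns pulls in include('blog.urls'). Purely as an illustration of what that included URLconf could look like, here is a hypothetical blog/urls.py; the view classes and the about view are placeholders, not code from this project.

# Hypothetical blog/urls.py that include('blog.urls') above could resolve to.
# PostListView, PostDetailView and views.about are placeholders, not this project's code.
from django.urls import path

from . import views

urlpatterns = [
    path('', views.PostListView.as_view(), name='blog-home'),                 # class-based view
    path('post/<int:pk>/', views.PostDetailView.as_view(), name='post-detail'),
    path('about/', views.about, name='blog-about'),                           # function view
]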
|
|
__init__.py
|
from .module_wifi import *
|
||
config.py
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
SSL_DISABLE = False
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
MAIL_SERVER = 'mail.messagingengine.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
YNITIUM_MAIL_SUBJECT_PREFIX = '[Ynitium]'
YNITIUM_MAIL_SENDER = 'Ynitium Admin <[email protected]>'
YNITIUM_ADMIN = os.environ.get('YNITIUM_ADMIN')
YNITIUM_POSTS_PER_PAGE = 15
YNITIUM_FOLLOWERS_PER_PAGE = 50
YNITIUM_COMMENTS_PER_PAGE = 30
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
@classmethod
def init_app(cls, app):
Config.init_app(app)
# email errors to the administrators
import logging
from logging.handlers import SMTPHandler
credentials = None
secure = None
if getattr(cls, 'MAIL_USERNAME', None) is not None:
credentials = (cls.MAIL_USERNAME, cls.MAIL_PASSWORD)
if getattr(cls, 'MAIL_USE_TLS', None):
secure = ()
mail_handler = SMTPHandler(
mailhost=(cls.MAIL_SERVER, cls.MAIL_PORT),
fromaddr=cls.YNITIUM_MAIL_SENDER,
toaddrs=[cls.YNITIUM_ADMIN],
subject=cls.YNITIUM_MAIL_SUBJECT_PREFIX + ' Application Error',
credentials=credentials,
secure=secure)
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
class HerokuConfig(ProductionConfig):
SSL_DISABLE = bool(os.environ.get('SSL_DISABLE'))
@classmethod
def
|
(cls, app):
ProductionConfig.init_app(app)
# handle proxy server headers
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
# log to stderr
import logging
from logging import StreamHandler
file_handler = StreamHandler()
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'heroku': HerokuConfig,
'default': DevelopmentConfig
}
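The config dictionary above maps environment names to configuration classes. A minimal sketch of how an application factory would typically consume it is shown below; the create_app factory and the FLASK_CONFIG environment variable are assumptions for illustration and do not appear in this file.

# Hypothetical application factory showing how the config mapping above is commonly used.
import os

from flask import Flask

from config import config  # the dictionary defined above


def create_app(config_name=None):
    config_name = config_name or os.environ.get('FLASK_CONFIG', 'default')
    app = Flask(__name__)
    app.config.from_object(config[config_name])   # load the class attributes into app.config
    config[config_name].init_app(app)             # class-specific setup (e.g. the error-mail handler)
    return app


app = create_app()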
|
init_app
|
util.py
|
import sqlite3
from os import path
import sys
import logging
from typing import Any
app_logger = logging.getLogger("api_logic_server_app")
def log(msg: Any) -> None:
app_logger.info(msg)
# print("TIL==> " + msg)
def connection() -> sqlite3.Connection:
ROOT: str = path.dirname(path.realpath(__file__))
log(ROOT)
_connection = sqlite3.connect(path.join(ROOT, "sqlitedata.db"))
return _connection
def dbpath(dbname: str) -> str:
ROOT: str = path.dirname(path.realpath(__file__))
log('ROOT: '+ROOT)
PATH: str = path.join(ROOT, dbname)
|
log('DBPATH: '+PATH)
return PATH
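A short usage sketch for the helpers above: it opens the bundled SQLite database via connection(), lists the tables that actually exist, and resolves an absolute path with dbpath(). The snippet is illustrative only and closes the connection explicitly, since connection() returns a plain sqlite3.Connection.

# Illustrative usage of the helpers above; no project-specific table names are assumed.
from util import connection, dbpath, log

conn = connection()
try:
    cursor = conn.execute("SELECT name FROM sqlite_master WHERE type = 'table'")
    log([row[0] for row in cursor.fetchall()])  # names of the tables present in sqlitedata.db
finally:
    conn.close()

log(dbpath("sqlitedata.db"))  # absolute path of the database file next to util.py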
|
|
JieQi.ts
|
import {Solar} from './Solar';
import {LunarUtil} from './LunarUtil';
export class JieQi {
private _name: string;
private _solar: Solar;
private _jie: boolean;
private _qi: boolean;
constructor(name: string, solar: Solar) {
let jie = false, qi = false, i, j;
for (i = 0, j = LunarUtil.JIE.length; i < j; i++) {
if (LunarUtil.JIE[i] === name) {
jie = true;
break;
}
}
if (!jie) {
for (i = 0, j = LunarUtil.QI.length; i < j; i++) {
if (LunarUtil.QI[i] === name) {
qi = true;
break;
}
}
}
this._name = name;
this._solar = solar;
this._jie = jie;
this._qi = qi;
}
getName(): string {
return this._name;
}
getSolar(): Solar {
|
setName(name: string) {
this._name = name;
}
setSolar(solar: Solar) {
this._solar = solar;
}
isJie(): boolean {
return this._jie;
}
isQi(): boolean {
return this._qi;
}
toString(): string {
return this.getName();
}
}
|
return this._solar;
}
|
keys_agent_l3.go
|
// Copyright (c) 2017 Cisco and/or its affiliates.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package l3
const (
// StaticArpPrefix is a prefix used in ETCD to store configuration for Linux static ARPs.
StaticArpPrefix = "linux/config/v1/arp/"
// StaticRoutePrefix is a prefix used in ETCD to store configuration for Linux static routes.
StaticRoutePrefix = "linux/config/v1/route/"
)
// StaticArpKeyPrefix returns the prefix used in ETCD to store config for Linux static ARPs
func StaticArpKeyPrefix() string {
return StaticArpPrefix
}
// StaticArpKey returns the prefix used in ETCD to store configuration of a particular Linux ARP entry.
func StaticArpKey(arpLabel string) string {
return StaticArpPrefix + arpLabel
}
// StaticRouteKeyPrefix returns the prefix used in ETCD to store config for Linux static routes
func StaticRouteKeyPrefix() string {
return StaticRoutePrefix
}
// StaticRouteKey returns the prefix used in ETCD to store configuration of a particular Linux route.
func
|
(routeLabel string) string {
return StaticRoutePrefix + routeLabel
}
|
StaticRouteKey
|
utils.py
|
import traceback
from pathlib import Path
import hashlib
import yaml
def get_media_dirs(media_dir_stream):
result = dict()
movie_dir_map = dict()
for media_location in media_dir_stream[0].replace('\n', '').replace('\r', '').split(','):
movie_dir_map[hashlib.md5(media_location.encode('utf-8')).hexdigest()] = Path(media_location)
tv_dir_map = dict()
for tv_location in media_dir_stream[1].replace('\n', '').replace('\r', '').split(','):
tv_dir_map[hashlib.md5(tv_location.encode('utf-8')).hexdigest()] = Path(tv_location)
result['movie_dir_map'] = movie_dir_map
result['tv_dir_map'] = tv_dir_map
return result
class Config:
__filepath = None
__config = dict()
__movie_dirs_map = dict()
|
def __init__(self, config_filepath=None):
blank_config = {
'movie_dir': list(),
'tv_dir': list()
}
self.__filepath = config_filepath
try:
with open(self.__filepath) as f:
self.__config = yaml.load(f, Loader=yaml.FullLoader)
if (
(not self.__config) or
(type(self.__config) != dict) or
(type(self.__config.get('movie_dir')) != list) or
(type(self.__config.get('tv_dir')) != list)
):
self.__config = blank_config
try:
with open(self.__filepath, 'w') as f:
data = yaml.dump(self.__config, f)
except Exception as ex:
print('Config :: update -> ', ex)
traceback.print_exc()
except Exception as ex:
self.__config = blank_config
try:
with open(self.__filepath, 'w') as f:
data = yaml.dump(self.__config, f)
except Exception as ex:
print('Config :: update -> ', ex)
traceback.print_exc()
print('Config::init: -> Creating a fresh config.yaml file')
finally:
if type(self.__config.get('movie_dir')) == list:
for media_location in self.__config.get('movie_dir'):
self.__movie_dirs_map[hashlib.md5(media_location.encode('utf-8')).hexdigest()] = Path(
media_location)
if type(self.__config.get('tv_dir')) == list:
for tv_location in self.__config.get('tv_dir'):
self.__tv_dirs_map[hashlib.md5(tv_location.encode('utf-8')).hexdigest()] = Path(tv_location)
def get(self):
return self.__config
def get_movie_dirs_map(self):
return self.__movie_dirs_map
def get_tv_dirs_map(self):
return self.__tv_dirs_map
def add_to_tv_dirs(self, new_tv_dir):
if Path(new_tv_dir).exists() and (new_tv_dir not in self.__config['tv_dir']):
self.__config['tv_dir'].append(new_tv_dir)
self.__tv_dirs_map[hashlib.md5(new_tv_dir.encode('utf-8')).hexdigest()] = Path(new_tv_dir)
def add_to_movie_dirs(self, new_movie_dir):
if Path(new_movie_dir).exists() and (new_movie_dir not in self.__config['movie_dir']):
self.__config['movie_dir'].append(new_movie_dir)
self.__movie_dirs_map[hashlib.md5(new_movie_dir.encode('utf-8')).hexdigest()] = Path(new_movie_dir)
def remove_from_movie_dirs(self, movie_dir):
if self.__config['movie_dir'] and movie_dir in self.__config['movie_dir']:
self.__config['movie_dir'].remove(movie_dir)
del self.__movie_dirs_map[hashlib.md5(movie_dir.encode('utf-8')).hexdigest()]
def remove_from_tv_dirs(self, tv_dir):
if self.__config['tv_dir'] and tv_dir in self.__config['tv_dir']:
self.__config['tv_dir'].remove(tv_dir)
del self.__tv_dirs_map[hashlib.md5(tv_dir.encode('utf-8')).hexdigest()]
def refresh(self):
try:
with open(self.__filepath) as f:
self.__config = yaml.load(f, Loader=yaml.FullLoader)
if type(self.__config.get('movie_dir')) == list:
for media_location in self.__config.get('movie_dir'):
self.__movie_dirs_map[hashlib.md5(media_location.encode('utf-8')).hexdigest()] = Path(
media_location)
if type(self.__config.get('tv_dir')) == list:
for tv_location in self.__config.get('tv_dir'):
self.__tv_dirs_map[hashlib.md5(tv_location.encode('utf-8')).hexdigest()] = Path(tv_location)
except Exception as ex:
print('Config :: refresh -> ', ex)
traceback.print_exc()
def update(self, updated_config=None):
if updated_config:
self.__config = updated_config
try:
with open(self.__filepath, 'w') as f:
data = yaml.dump(self.__config, f)
except Exception as ex:
print('Config :: update -> ', ex)
traceback.print_exc()
|
__tv_dirs_map = dict()
|
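Finally, a small usage sketch for the YAML-backed Config class above; the config.yaml file name and the media paths are placeholders chosen for illustration.

# Illustrative usage of the Config class above; paths and file name are placeholders.
from utils import Config

cfg = Config('config.yaml')               # creates a fresh file if missing or malformed
cfg.add_to_movie_dirs('/media/movies')    # added only if the path exists and is not listed yet
cfg.add_to_tv_dirs('/media/tv')
cfg.update()                              # persist the in-memory config back to disk
print(cfg.get_movie_dirs_map())           # md5(path) -> pathlib.Path mapping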